In [1]:
!pip install tensorflow-io
Collecting tensorflow-io
  Downloading https://files.pythonhosted.org/packages/73/41/881ec181816767bd91b8f2dbb319dff8eb5ff80039ed6e003c1ab8d547d7/tensorflow_io-0.17.0-cp37-cp37m-manylinux2010_x86_64.whl (25.3MB)
     |████████████████████████████████| 25.3MB 143kB/s 
Requirement already satisfied: tensorflow<2.5.0,>=2.4.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow-io) (2.4.1)
Requirement already satisfied: numpy~=1.19.2 in /usr/local/lib/python3.7/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (1.19.5)
Requirement already satisfied: google-pasta~=0.2 in /usr/local/lib/python3.7/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (0.2.0)
Requirement already satisfied: grpcio~=1.32.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (1.32.0)
Requirement already satisfied: termcolor~=1.1.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (1.1.0)
Requirement already satisfied: flatbuffers~=1.12.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (1.12)
Requirement already satisfied: h5py~=2.10.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (2.10.0)
Requirement already satisfied: protobuf>=3.9.2 in /usr/local/lib/python3.7/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (3.12.4)
Requirement already satisfied: absl-py~=0.10 in /usr/local/lib/python3.7/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (0.10.0)
Requirement already satisfied: wrapt~=1.12.1 in /usr/local/lib/python3.7/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (1.12.1)
Requirement already satisfied: keras-preprocessing~=1.1.2 in /usr/local/lib/python3.7/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (1.1.2)
Requirement already satisfied: six~=1.15.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (1.15.0)
Requirement already satisfied: gast==0.3.3 in /usr/local/lib/python3.7/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (0.3.3)
Requirement already satisfied: astunparse~=1.6.3 in /usr/local/lib/python3.7/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (1.6.3)
Requirement already satisfied: wheel~=0.35 in /usr/local/lib/python3.7/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (0.36.2)
Requirement already satisfied: tensorboard~=2.4 in /usr/local/lib/python3.7/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (2.4.1)
Requirement already satisfied: tensorflow-estimator<2.5.0,>=2.4.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (2.4.0)
Requirement already satisfied: opt-einsum~=3.3.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (3.3.0)
Requirement already satisfied: typing-extensions~=3.7.4 in /usr/local/lib/python3.7/dist-packages (from tensorflow<2.5.0,>=2.4.0->tensorflow-io) (3.7.4.3)
Requirement already satisfied: setuptools in /usr/local/lib/python3.7/dist-packages (from protobuf>=3.9.2->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (53.0.0)
Requirement already satisfied: markdown>=2.6.8 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (3.3.3)
Requirement already satisfied: tensorboard-plugin-wit>=1.6.0 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (1.8.0)
Requirement already satisfied: google-auth-oauthlib<0.5,>=0.4.1 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (0.4.2)
Requirement already satisfied: werkzeug>=0.11.15 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (1.0.1)
Requirement already satisfied: requests<3,>=2.21.0 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (2.23.0)
Requirement already satisfied: google-auth<2,>=1.6.3 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (1.27.0)
Requirement already satisfied: importlib-metadata; python_version < "3.8" in /usr/local/lib/python3.7/dist-packages (from markdown>=2.6.8->tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (3.4.0)
Requirement already satisfied: requests-oauthlib>=0.7.0 in /usr/local/lib/python3.7/dist-packages (from google-auth-oauthlib<0.5,>=0.4.1->tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (1.3.0)
Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests<3,>=2.21.0->tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (2020.12.5)
Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests<3,>=2.21.0->tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (2.10)
Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests<3,>=2.21.0->tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (3.0.4)
Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests<3,>=2.21.0->tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (1.24.3)
Requirement already satisfied: pyasn1-modules>=0.2.1 in /usr/local/lib/python3.7/dist-packages (from google-auth<2,>=1.6.3->tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (0.2.8)
Requirement already satisfied: cachetools<5.0,>=2.0.0 in /usr/local/lib/python3.7/dist-packages (from google-auth<2,>=1.6.3->tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (4.2.1)
Requirement already satisfied: rsa<5,>=3.1.4; python_version >= "3.6" in /usr/local/lib/python3.7/dist-packages (from google-auth<2,>=1.6.3->tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (4.7.1)
Requirement already satisfied: zipp>=0.5 in /usr/local/lib/python3.7/dist-packages (from importlib-metadata; python_version < "3.8"->markdown>=2.6.8->tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (3.4.0)
Requirement already satisfied: oauthlib>=3.0.0 in /usr/local/lib/python3.7/dist-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<0.5,>=0.4.1->tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (3.1.0)
Requirement already satisfied: pyasn1<0.5.0,>=0.4.6 in /usr/local/lib/python3.7/dist-packages (from pyasn1-modules>=0.2.1->google-auth<2,>=1.6.3->tensorboard~=2.4->tensorflow<2.5.0,>=2.4.0->tensorflow-io) (0.4.8)
Installing collected packages: tensorflow-io
Successfully installed tensorflow-io-0.17.0
In [2]:
!pip install pydicom
Collecting pydicom
  Downloading https://files.pythonhosted.org/packages/f4/15/df16546bc59bfca390cf072d473fb2c8acd4231636f64356593a63137e55/pydicom-2.1.2-py3-none-any.whl (1.9MB)
     |████████████████████████████████| 1.9MB 16.3MB/s 
Installing collected packages: pydicom
Successfully installed pydicom-2.1.2
In [3]:
from google.colab import drive
drive.mount('/content/drive')
Mounted at /content/drive
In [4]:
import tensorflow as tf

AUTO = tf.data.experimental.AUTOTUNE
# Detect hardware, return appropriate distribution strategy
try:
    tpu = tf.distribute.cluster_resolver.TPUClusterResolver()  # TPU detection. No parameters necessary if TPU_NAME environment variable is set. On Kaggle this is always the case.
    print('Running on TPU ', tpu.master())
except ValueError:
    tpu = None

if tpu:
    tf.config.experimental_connect_to_cluster(tpu)
    tf.tpu.experimental.initialize_tpu_system(tpu)
    strategy = tf.distribute.experimental.TPUStrategy(tpu)
else:
    strategy = tf.distribute.get_strategy() # default distribution strategy in Tensorflow. Works on CPU and single GPU.

print("REPLICAS: ", strategy.num_replicas_in_sync)
REPLICAS:  1

Preparing the Environment

In [5]:
import tensorflow_io as tfio
import pathlib
import os
from os import listdir
from os.path import join
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
import PIL
import scipy as sp
import PIL.Image
from __future__ import absolute_import, division, print_function, unicode_literals
import tensorflow_hub as hub
import functools
from glob import glob
import glob
from sklearn.model_selection import train_test_split
import pydicom
from tqdm import tqdm
import warnings
warnings.filterwarnings('ignore')
# For more information about autotune:
# https://www.tensorflow.org/guide/data_performance#prefetching
AUTOTUNE = tf.data.experimental.AUTOTUNE
print(f'Tensorflow ver. {tf.__version__}')

np.set_printoptions(precision=4)
Tensorflow ver. 2.4.1

Reading the data

In [6]:
from glob import glob
import glob

#reading all dcm files into train and test
train = sorted(glob.glob("/content/drive/MyDrive/siim-acr-pneumothorax/pneumothorax/dicom-images-train/*/*/*.dcm"))
test = sorted(glob.glob("/content/drive/MyDrive/siim-acr-pneumothorax/pneumothorax/dicom-images-test/*/*/*.dcm"))
In [7]:
# Print out the first 5 file names to verify we're in the right folder.
print ("There are total of %d DICOM images in train:" % len(train))
print('*' * 50)
print ("There are total of %d DICOM images in test:" % len(test))
There are total of 10722 DICOM images in train:
**************************************************
There are total of 1377 DICOM images in test:
In [8]:
# load masks for training images
dataset = pd.read_csv('/content/drive/MyDrive/siim-acr-pneumothorax/pneumothorax/train-rle.csv', delimiter=",")
dataset.head()
Out[8]:
ImageId EncodedPixels
0 1.2.276.0.7230010.3.1.4.8323329.5597.151787518... -1
1 1.2.276.0.7230010.3.1.4.8323329.12515.15178752... -1
2 1.2.276.0.7230010.3.1.4.8323329.4904.151787518... 175349 7 1013 12 1009 17 1005 19 1003 20 1002...
3 1.2.276.0.7230010.3.1.4.8323329.32579.15178751... 407576 2 1021 7 1015 10 1013 12 1011 14 1008 ...
4 1.2.276.0.7230010.3.1.4.8323329.32579.15178751... 252069 1 1021 3 1020 4 1018 5 1018 6 1016 7 1...
In [9]:
# add column if the file is duplicate or not
dataset['isDuplicate'] = dataset['ImageId'].duplicated()
dataset.head()
Out[9]:
ImageId EncodedPixels isDuplicate
0 1.2.276.0.7230010.3.1.4.8323329.5597.151787518... -1 False
1 1.2.276.0.7230010.3.1.4.8323329.12515.15178752... -1 False
2 1.2.276.0.7230010.3.1.4.8323329.4904.151787518... 175349 7 1013 12 1009 17 1005 19 1003 20 1002... False
3 1.2.276.0.7230010.3.1.4.8323329.32579.15178751... 407576 2 1021 7 1015 10 1013 12 1011 14 1008 ... False
4 1.2.276.0.7230010.3.1.4.8323329.32579.15178751... 252069 1 1021 3 1020 4 1018 5 1018 6 1016 7 1... True
In [10]:
# check where the files are duplicate
dupImages = dataset.index[dataset['isDuplicate']==True]
print(f"We have total {len(dupImages)} duplicate image ids")
We have total 907 duplicate image ids
In [11]:
print(f"With duplicates we have total {len(dataset)} files.")
dataset = dataset.drop(list(dupImages))
print(f"Without duplicates we have total {len(dataset)} files.")
With duplicates we have total 11582 files.
Without duplicates we have total 10675 files.
In [12]:
dataset = dataset.drop('isDuplicate', axis=1)

Accessing the dataset information

In [13]:
#dataframe to ease the access
patients = []
missing = 0
remove=[]
pd.reset_option('max_colwidth')

for t in train:
  data = pydicom.dcmread(t)
  patient = {}
  patient["UID"] = data.SOPInstanceUID
  try:
    encoded_pixels = dataset[dataset["ImageId"] == patient["UID"]].values[0][1]
    patient["EncodedPixels"] = encoded_pixels
  except:
    missing = missing + 1
    remove.append("/content/drive/MyDrive/siim-acr-pneumothorax/pneumothorax/dicom-images-train/" + data.StudyInstanceUID + "/" + data.SeriesInstanceUID + "/" + data.SOPInstanceUID + ".dcm")
  patient["Age"] = data.PatientAge
  patient["Sex"] = data.PatientSex
  patient["Modality"] = data.Modality
  patient["BodyPart"] = data.BodyPartExamined
  patient["ViewPosition"] = data.ViewPosition
  patient["path"] = "/content/drive/MyDrive/siim-acr-pneumothorax/pneumothorax/dicom-images-train/" + data.StudyInstanceUID + "/" + data.SeriesInstanceUID + "/" + data.SOPInstanceUID + ".dcm"
  patients.append(patient)
In [14]:
patients_train = pd.DataFrame(patients,columns=["UID", "EncodedPixels","path"])
In [15]:
label=[]
for i in patients_train['EncodedPixels']:
  if str(i)==" -1":
    label.append(0) #custom labelling based on the encoded pixels
  else:
    label.append(1)
patients_train['Label']=label
patients_train.head()
Out[15]:
UID EncodedPixels path Label
0 1.2.276.0.7230010.3.1.4.8323329.1000.151787516... -1 /content/drive/MyDrive/siim-acr-pneumothorax/p... 0
1 1.2.276.0.7230010.3.1.4.8323329.10000.15178752... -1 /content/drive/MyDrive/siim-acr-pneumothorax/p... 0
2 1.2.276.0.7230010.3.1.4.8323329.10001.15178752... -1 /content/drive/MyDrive/siim-acr-pneumothorax/p... 0
3 1.2.276.0.7230010.3.1.4.8323329.10002.15178752... -1 /content/drive/MyDrive/siim-acr-pneumothorax/p... 0
4 1.2.276.0.7230010.3.1.4.8323329.10003.15178752... -1 /content/drive/MyDrive/siim-acr-pneumothorax/p... 0
In [16]:
patients_train=patients_train.loc[~patients_train['path'].isin(remove)] #remove rows which do not have images
In [17]:
patients_train
Out[17]:
UID EncodedPixels path Label
0 1.2.276.0.7230010.3.1.4.8323329.1000.151787516... -1 /content/drive/MyDrive/siim-acr-pneumothorax/p... 0
1 1.2.276.0.7230010.3.1.4.8323329.10000.15178752... -1 /content/drive/MyDrive/siim-acr-pneumothorax/p... 0
2 1.2.276.0.7230010.3.1.4.8323329.10001.15178752... -1 /content/drive/MyDrive/siim-acr-pneumothorax/p... 0
3 1.2.276.0.7230010.3.1.4.8323329.10002.15178752... -1 /content/drive/MyDrive/siim-acr-pneumothorax/p... 0
4 1.2.276.0.7230010.3.1.4.8323329.10003.15178752... -1 /content/drive/MyDrive/siim-acr-pneumothorax/p... 0
... ... ... ... ...
10717 1.2.276.0.7230010.3.1.4.8323329.5792.151787519... -1 /content/drive/MyDrive/siim-acr-pneumothorax/p... 0
10718 1.2.276.0.7230010.3.1.4.8323329.5793.151787519... -1 /content/drive/MyDrive/siim-acr-pneumothorax/p... 0
10719 1.2.276.0.7230010.3.1.4.8323329.5794.151787519... -1 /content/drive/MyDrive/siim-acr-pneumothorax/p... 0
10720 1.2.276.0.7230010.3.1.4.8323329.5795.151787519... 174459 17 982 47 952 76 943 79 936 83 937 83 ... /content/drive/MyDrive/siim-acr-pneumothorax/p... 1
10721 1.2.276.0.7230010.3.1.4.8323329.5796.151787519... -1 /content/drive/MyDrive/siim-acr-pneumothorax/p... 0

10685 rows × 4 columns

In [19]:
patients_train.to_csv('/content/drive/MyDrive/siim-acr-pneumothorax/pneumothorax/final_dataset.csv') 
In [20]:
patients_train.to_csv('final.csv')

Creating a source dataset

In [21]:
file_path=patients_train['path'].values
labels=patients_train['Label'].values
In [22]:
list_ds = tf.data.Dataset.from_tensor_slices((file_path,labels))
list_ds = list_ds.shuffle(len(patients_train),seed=42)
In [23]:
def decode_img(img):
# convert the compressed string to a 3D uint8 tensor
#image_bytes = tf.io.read_file(img)
  image = tfio.image.decode_dicom_image(img, dtype=tf.uint8,color_dim=True,scale='preserve')
  image = tf.image.convert_image_dtype(image, tf.float32)#converting the image to tf.float32
  image=tf.squeeze(image,[0]) #squeezing the image because the file is of the shape (1,1024,1024,1) and we want (1024,1024,3)
  b = tf.constant([1,1,3], tf.int32)
  image=tf.tile(image,b)
  image=tf.image.resize(image,size=[256,256]) #the image is of the shape (1024,1024,1) to make it (1024,1024,3) by using tf.tile
  return image
In [24]:
def process_path(file_path,label):
  img = tf.io.read_file(file_path) #reading the image from the file path
  img = decode_img(img) #passing the image to the function
  return img,label
In [25]:
AUTOTUNE = tf.data.experimental.AUTOTUNE
list_ds = list_ds.map(process_path,num_parallel_calls=AUTOTUNE) #mapping the file paths to the above function
In [26]:
val_size = int(len(patients_train) * 0.2) #splitting to 80-20 data
train_ds = list_ds.skip(val_size)
val_ds = list_ds.take(val_size)

Applying some transformations to our dataset

The tf.data.Dataset.cache transformation can cache a dataset, either in memory or on local storage. This will save some operations (like file opening and data reading) from being executed during each epoch. The next epochs will reuse the data cached by the cache transformation.

tf.data.Dataset.prefetch overlaps the preprocessing and model execution of a training step. While the model is executing training step s, the input pipeline is reading the data for step s+1. Doing so reduces the step time to the maximum (as opposed to the sum) of the training and the time it takes to extract the data.

repeat() method of tf.data.Dataset class is used for repeating the tensors in the dataset.

shuffle () shuffles the train_dataset with a buffer of size 1500 for picking random entries.

batch() will take the first 32 entries, based on the batch size set, and make a batch out of them

In [27]:
AUTOTUNE = tf.data.experimental.AUTOTUNE
SEED = 42
BATCH_SIZE = 16
BUFFER_SIZE = 1500

# Preparing the Train dataset by applying dataset transformations

# train_ds = train_ds.repeat()
# train_ds = train_ds.cache().shuffle(buffer_size=BUFFER_SIZE, seed=SEED).batch(32, drop_remainder=True).prefetch(buffer_size=AUTOTUNE)
train_ds = train_ds.shuffle(buffer_size=BUFFER_SIZE, seed=SEED)
train_ds = train_ds.cache()
train_ds = train_ds.batch(BATCH_SIZE)
train_ds = train_ds.prefetch(buffer_size=AUTOTUNE)
print(train_ds)

# Preparing the Validation Dataset
# val_ds = val_ds.repeat()
# val_ds = val_ds.cache().shuffle(buffer_size=BUFFER_SIZE, seed=SEED).batch(32, drop_remainder=True).prefetch(buffer_size=AUTOTUNE)
val_ds = val_ds.cache()
val_ds = val_ds.batch(BATCH_SIZE)
val_ds = val_ds.prefetch(buffer_size=AUTOTUNE)
print(val_ds)
<PrefetchDataset shapes: ((None, 256, 256, None), (None,)), types: (tf.float32, tf.int64)>
<PrefetchDataset shapes: ((None, 256, 256, None), (None,)), types: (tf.float32, tf.int64)>

Building the model

In [28]:
import random, re, math
import tensorflow as tf, tensorflow.keras.backend as K
from tensorflow.keras.layers import Dense
from tensorflow.keras.models import Model
from tensorflow.keras import optimizers
from tensorflow.keras.models import Sequential
import tensorflow.keras.layers as L
from tensorflow.keras.applications import ResNet152V2, InceptionResNetV2, InceptionV3, Xception, VGG19
from tensorflow.keras.layers import Dense, GlobalAveragePooling2D
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from sklearn.model_selection import train_test_split
from tensorflow.keras.layers import Dense, Input, Dropout, Lambda, Conv2D, Conv2DTranspose, MaxPooling2D, Concatenate, Activation, Add, multiply, add, concatenate, LeakyReLU, ZeroPadding2D, UpSampling2D, BatchNormalization, Flatten
from tensorflow.keras.regularizers import l2
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.models import Model, load_model
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint, ReduceLROnPlateau
from tensorflow.keras.losses import binary_crossentropy
from tensorflow.keras import backend as K 
from tensorflow.keras.losses import binary_crossentropy
from sklearn.metrics import classification_report, confusion_matrix
from tensorflow.keras import regularizers

import matplotlib.pyplot as plt
from tensorflow.keras.applications import DenseNet121, ResNet50V2, DenseNet169, InceptionResNetV2,MobileNetV2, NASNetMobile, DenseNet201, NASNetLarge, Xception
from tensorflow.keras.layers import Average
In [29]:
model_1 =tf.keras.applications.DenseNet121(weights = "imagenet", include_top=False, input_shape = (256,256,3))

for i in model_1.layers:
  i.trainable=False

model=model_1.output
model=Conv2D(32, (3, 3))(model)
model=(Activation('relu'))(model)
model=(MaxPooling2D(pool_size=(2, 2)))(model)
model=Flatten()(model)
model = Dense(256, activation="relu")(model)
model = Dropout(0.2)(model)
model = Dense(128, activation="relu")(model)
model = Dropout(0.2)(model)
output_layer = Dense(1, activation="sigmoid")(model)
model1 = Model(model_1.input,output_layer)
model1.compile(loss = "binary_crossentropy", optimizer =tf.keras.optimizers.Adam(lr=0.0001), metrics=["accuracy",tf.keras.metrics.Precision(name='precision'),
                                                                                    tf.keras.metrics.Recall(name='recall')])
Downloading data from https://storage.googleapis.com/tensorflow/keras-applications/densenet/densenet121_weights_tf_dim_ordering_tf_kernels_notop.h5
29089792/29084464 [==============================] - 0s 0us/step
In [ ]:
model1.summary()
Model: "model"
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to                     
==================================================================================================
input_1 (InputLayer)            [(None, 256, 256, 3) 0                                            
__________________________________________________________________________________________________
zero_padding2d (ZeroPadding2D)  (None, 262, 262, 3)  0           input_1[0][0]                    
__________________________________________________________________________________________________
conv1/conv (Conv2D)             (None, 128, 128, 64) 9408        zero_padding2d[0][0]             
__________________________________________________________________________________________________
conv1/bn (BatchNormalization)   (None, 128, 128, 64) 256         conv1/conv[0][0]                 
__________________________________________________________________________________________________
conv1/relu (Activation)         (None, 128, 128, 64) 0           conv1/bn[0][0]                   
__________________________________________________________________________________________________
zero_padding2d_1 (ZeroPadding2D (None, 130, 130, 64) 0           conv1/relu[0][0]                 
__________________________________________________________________________________________________
pool1 (MaxPooling2D)            (None, 64, 64, 64)   0           zero_padding2d_1[0][0]           
__________________________________________________________________________________________________
conv2_block1_0_bn (BatchNormali (None, 64, 64, 64)   256         pool1[0][0]                      
__________________________________________________________________________________________________
conv2_block1_0_relu (Activation (None, 64, 64, 64)   0           conv2_block1_0_bn[0][0]          
__________________________________________________________________________________________________
conv2_block1_1_conv (Conv2D)    (None, 64, 64, 128)  8192        conv2_block1_0_relu[0][0]        
__________________________________________________________________________________________________
conv2_block1_1_bn (BatchNormali (None, 64, 64, 128)  512         conv2_block1_1_conv[0][0]        
__________________________________________________________________________________________________
conv2_block1_1_relu (Activation (None, 64, 64, 128)  0           conv2_block1_1_bn[0][0]          
__________________________________________________________________________________________________
conv2_block1_2_conv (Conv2D)    (None, 64, 64, 32)   36864       conv2_block1_1_relu[0][0]        
__________________________________________________________________________________________________
conv2_block1_concat (Concatenat (None, 64, 64, 96)   0           pool1[0][0]                      
                                                                 conv2_block1_2_conv[0][0]        
__________________________________________________________________________________________________
conv2_block2_0_bn (BatchNormali (None, 64, 64, 96)   384         conv2_block1_concat[0][0]        
__________________________________________________________________________________________________
conv2_block2_0_relu (Activation (None, 64, 64, 96)   0           conv2_block2_0_bn[0][0]          
__________________________________________________________________________________________________
conv2_block2_1_conv (Conv2D)    (None, 64, 64, 128)  12288       conv2_block2_0_relu[0][0]        
__________________________________________________________________________________________________
conv2_block2_1_bn (BatchNormali (None, 64, 64, 128)  512         conv2_block2_1_conv[0][0]        
__________________________________________________________________________________________________
conv2_block2_1_relu (Activation (None, 64, 64, 128)  0           conv2_block2_1_bn[0][0]          
__________________________________________________________________________________________________
conv2_block2_2_conv (Conv2D)    (None, 64, 64, 32)   36864       conv2_block2_1_relu[0][0]        
__________________________________________________________________________________________________
conv2_block2_concat (Concatenat (None, 64, 64, 128)  0           conv2_block1_concat[0][0]        
                                                                 conv2_block2_2_conv[0][0]        
__________________________________________________________________________________________________
conv2_block3_0_bn (BatchNormali (None, 64, 64, 128)  512         conv2_block2_concat[0][0]        
__________________________________________________________________________________________________
conv2_block3_0_relu (Activation (None, 64, 64, 128)  0           conv2_block3_0_bn[0][0]          
__________________________________________________________________________________________________
conv2_block3_1_conv (Conv2D)    (None, 64, 64, 128)  16384       conv2_block3_0_relu[0][0]        
__________________________________________________________________________________________________
conv2_block3_1_bn (BatchNormali (None, 64, 64, 128)  512         conv2_block3_1_conv[0][0]        
__________________________________________________________________________________________________
conv2_block3_1_relu (Activation (None, 64, 64, 128)  0           conv2_block3_1_bn[0][0]          
__________________________________________________________________________________________________
conv2_block3_2_conv (Conv2D)    (None, 64, 64, 32)   36864       conv2_block3_1_relu[0][0]        
__________________________________________________________________________________________________
conv2_block3_concat (Concatenat (None, 64, 64, 160)  0           conv2_block2_concat[0][0]        
                                                                 conv2_block3_2_conv[0][0]        
__________________________________________________________________________________________________
conv2_block4_0_bn (BatchNormali (None, 64, 64, 160)  640         conv2_block3_concat[0][0]        
__________________________________________________________________________________________________
conv2_block4_0_relu (Activation (None, 64, 64, 160)  0           conv2_block4_0_bn[0][0]          
__________________________________________________________________________________________________
conv2_block4_1_conv (Conv2D)    (None, 64, 64, 128)  20480       conv2_block4_0_relu[0][0]        
__________________________________________________________________________________________________
conv2_block4_1_bn (BatchNormali (None, 64, 64, 128)  512         conv2_block4_1_conv[0][0]        
__________________________________________________________________________________________________
conv2_block4_1_relu (Activation (None, 64, 64, 128)  0           conv2_block4_1_bn[0][0]          
__________________________________________________________________________________________________
conv2_block4_2_conv (Conv2D)    (None, 64, 64, 32)   36864       conv2_block4_1_relu[0][0]        
__________________________________________________________________________________________________
conv2_block4_concat (Concatenat (None, 64, 64, 192)  0           conv2_block3_concat[0][0]        
                                                                 conv2_block4_2_conv[0][0]        
__________________________________________________________________________________________________
conv2_block5_0_bn (BatchNormali (None, 64, 64, 192)  768         conv2_block4_concat[0][0]        
__________________________________________________________________________________________________
conv2_block5_0_relu (Activation (None, 64, 64, 192)  0           conv2_block5_0_bn[0][0]          
__________________________________________________________________________________________________
conv2_block5_1_conv (Conv2D)    (None, 64, 64, 128)  24576       conv2_block5_0_relu[0][0]        
__________________________________________________________________________________________________
conv2_block5_1_bn (BatchNormali (None, 64, 64, 128)  512         conv2_block5_1_conv[0][0]        
__________________________________________________________________________________________________
conv2_block5_1_relu (Activation (None, 64, 64, 128)  0           conv2_block5_1_bn[0][0]          
__________________________________________________________________________________________________
conv2_block5_2_conv (Conv2D)    (None, 64, 64, 32)   36864       conv2_block5_1_relu[0][0]        
__________________________________________________________________________________________________
conv2_block5_concat (Concatenat (None, 64, 64, 224)  0           conv2_block4_concat[0][0]        
                                                                 conv2_block5_2_conv[0][0]        
__________________________________________________________________________________________________
conv2_block6_0_bn (BatchNormali (None, 64, 64, 224)  896         conv2_block5_concat[0][0]        
__________________________________________________________________________________________________
conv2_block6_0_relu (Activation (None, 64, 64, 224)  0           conv2_block6_0_bn[0][0]          
__________________________________________________________________________________________________
conv2_block6_1_conv (Conv2D)    (None, 64, 64, 128)  28672       conv2_block6_0_relu[0][0]        
__________________________________________________________________________________________________
conv2_block6_1_bn (BatchNormali (None, 64, 64, 128)  512         conv2_block6_1_conv[0][0]        
__________________________________________________________________________________________________
conv2_block6_1_relu (Activation (None, 64, 64, 128)  0           conv2_block6_1_bn[0][0]          
__________________________________________________________________________________________________
conv2_block6_2_conv (Conv2D)    (None, 64, 64, 32)   36864       conv2_block6_1_relu[0][0]        
__________________________________________________________________________________________________
conv2_block6_concat (Concatenat (None, 64, 64, 256)  0           conv2_block5_concat[0][0]        
                                                                 conv2_block6_2_conv[0][0]        
__________________________________________________________________________________________________
pool2_bn (BatchNormalization)   (None, 64, 64, 256)  1024        conv2_block6_concat[0][0]        
__________________________________________________________________________________________________
pool2_relu (Activation)         (None, 64, 64, 256)  0           pool2_bn[0][0]                   
__________________________________________________________________________________________________
pool2_conv (Conv2D)             (None, 64, 64, 128)  32768       pool2_relu[0][0]                 
__________________________________________________________________________________________________
pool2_pool (AveragePooling2D)   (None, 32, 32, 128)  0           pool2_conv[0][0]                 
__________________________________________________________________________________________________
conv3_block1_0_bn (BatchNormali (None, 32, 32, 128)  512         pool2_pool[0][0]                 
__________________________________________________________________________________________________
conv3_block1_0_relu (Activation (None, 32, 32, 128)  0           conv3_block1_0_bn[0][0]          
__________________________________________________________________________________________________
conv3_block1_1_conv (Conv2D)    (None, 32, 32, 128)  16384       conv3_block1_0_relu[0][0]        
__________________________________________________________________________________________________
conv3_block1_1_bn (BatchNormali (None, 32, 32, 128)  512         conv3_block1_1_conv[0][0]        
__________________________________________________________________________________________________
conv3_block1_1_relu (Activation (None, 32, 32, 128)  0           conv3_block1_1_bn[0][0]          
__________________________________________________________________________________________________
conv3_block1_2_conv (Conv2D)    (None, 32, 32, 32)   36864       conv3_block1_1_relu[0][0]        
__________________________________________________________________________________________________
conv3_block1_concat (Concatenat (None, 32, 32, 160)  0           pool2_pool[0][0]                 
                                                                 conv3_block1_2_conv[0][0]        
__________________________________________________________________________________________________
conv3_block2_0_bn (BatchNormali (None, 32, 32, 160)  640         conv3_block1_concat[0][0]        
__________________________________________________________________________________________________
conv3_block2_0_relu (Activation (None, 32, 32, 160)  0           conv3_block2_0_bn[0][0]          
__________________________________________________________________________________________________
conv3_block2_1_conv (Conv2D)    (None, 32, 32, 128)  20480       conv3_block2_0_relu[0][0]        
__________________________________________________________________________________________________
conv3_block2_1_bn (BatchNormali (None, 32, 32, 128)  512         conv3_block2_1_conv[0][0]        
__________________________________________________________________________________________________
conv3_block2_1_relu (Activation (None, 32, 32, 128)  0           conv3_block2_1_bn[0][0]          
__________________________________________________________________________________________________
conv3_block2_2_conv (Conv2D)    (None, 32, 32, 32)   36864       conv3_block2_1_relu[0][0]        
__________________________________________________________________________________________________
conv3_block2_concat (Concatenat (None, 32, 32, 192)  0           conv3_block1_concat[0][0]        
                                                                 conv3_block2_2_conv[0][0]        
__________________________________________________________________________________________________
conv3_block3_0_bn (BatchNormali (None, 32, 32, 192)  768         conv3_block2_concat[0][0]        
__________________________________________________________________________________________________
conv3_block3_0_relu (Activation (None, 32, 32, 192)  0           conv3_block3_0_bn[0][0]          
__________________________________________________________________________________________________
conv3_block3_1_conv (Conv2D)    (None, 32, 32, 128)  24576       conv3_block3_0_relu[0][0]        
__________________________________________________________________________________________________
conv3_block3_1_bn (BatchNormali (None, 32, 32, 128)  512         conv3_block3_1_conv[0][0]        
__________________________________________________________________________________________________
conv3_block3_1_relu (Activation (None, 32, 32, 128)  0           conv3_block3_1_bn[0][0]          
__________________________________________________________________________________________________
conv3_block3_2_conv (Conv2D)    (None, 32, 32, 32)   36864       conv3_block3_1_relu[0][0]        
__________________________________________________________________________________________________
conv3_block3_concat (Concatenat (None, 32, 32, 224)  0           conv3_block2_concat[0][0]        
                                                                 conv3_block3_2_conv[0][0]        
__________________________________________________________________________________________________
conv3_block4_0_bn (BatchNormali (None, 32, 32, 224)  896         conv3_block3_concat[0][0]        
__________________________________________________________________________________________________
conv3_block4_0_relu (Activation (None, 32, 32, 224)  0           conv3_block4_0_bn[0][0]          
__________________________________________________________________________________________________
conv3_block4_1_conv (Conv2D)    (None, 32, 32, 128)  28672       conv3_block4_0_relu[0][0]        
__________________________________________________________________________________________________
conv3_block4_1_bn (BatchNormali (None, 32, 32, 128)  512         conv3_block4_1_conv[0][0]        
__________________________________________________________________________________________________
conv3_block4_1_relu (Activation (None, 32, 32, 128)  0           conv3_block4_1_bn[0][0]          
__________________________________________________________________________________________________
conv3_block4_2_conv (Conv2D)    (None, 32, 32, 32)   36864       conv3_block4_1_relu[0][0]        
__________________________________________________________________________________________________
conv3_block4_concat (Concatenat (None, 32, 32, 256)  0           conv3_block3_concat[0][0]        
                                                                 conv3_block4_2_conv[0][0]        
__________________________________________________________________________________________________
conv3_block5_0_bn (BatchNormali (None, 32, 32, 256)  1024        conv3_block4_concat[0][0]        
__________________________________________________________________________________________________
conv3_block5_0_relu (Activation (None, 32, 32, 256)  0           conv3_block5_0_bn[0][0]          
__________________________________________________________________________________________________
conv3_block5_1_conv (Conv2D)    (None, 32, 32, 128)  32768       conv3_block5_0_relu[0][0]        
__________________________________________________________________________________________________
conv3_block5_1_bn (BatchNormali (None, 32, 32, 128)  512         conv3_block5_1_conv[0][0]        
__________________________________________________________________________________________________
conv3_block5_1_relu (Activation (None, 32, 32, 128)  0           conv3_block5_1_bn[0][0]          
__________________________________________________________________________________________________
conv3_block5_2_conv (Conv2D)    (None, 32, 32, 32)   36864       conv3_block5_1_relu[0][0]        
__________________________________________________________________________________________________
conv3_block5_concat (Concatenat (None, 32, 32, 288)  0           conv3_block4_concat[0][0]        
                                                                 conv3_block5_2_conv[0][0]        
__________________________________________________________________________________________________
conv3_block6_0_bn (BatchNormali (None, 32, 32, 288)  1152        conv3_block5_concat[0][0]        
__________________________________________________________________________________________________
conv3_block6_0_relu (Activation (None, 32, 32, 288)  0           conv3_block6_0_bn[0][0]          
__________________________________________________________________________________________________
conv3_block6_1_conv (Conv2D)    (None, 32, 32, 128)  36864       conv3_block6_0_relu[0][0]        
__________________________________________________________________________________________________
conv3_block6_1_bn (BatchNormali (None, 32, 32, 128)  512         conv3_block6_1_conv[0][0]        
__________________________________________________________________________________________________
conv3_block6_1_relu (Activation (None, 32, 32, 128)  0           conv3_block6_1_bn[0][0]          
__________________________________________________________________________________________________
conv3_block6_2_conv (Conv2D)    (None, 32, 32, 32)   36864       conv3_block6_1_relu[0][0]        
__________________________________________________________________________________________________
conv3_block6_concat (Concatenat (None, 32, 32, 320)  0           conv3_block5_concat[0][0]        
                                                                 conv3_block6_2_conv[0][0]        
__________________________________________________________________________________________________
conv3_block7_0_bn (BatchNormali (None, 32, 32, 320)  1280        conv3_block6_concat[0][0]        
__________________________________________________________________________________________________
conv3_block7_0_relu (Activation (None, 32, 32, 320)  0           conv3_block7_0_bn[0][0]          
__________________________________________________________________________________________________
conv3_block7_1_conv (Conv2D)    (None, 32, 32, 128)  40960       conv3_block7_0_relu[0][0]        
__________________________________________________________________________________________________
conv3_block7_1_bn (BatchNormali (None, 32, 32, 128)  512         conv3_block7_1_conv[0][0]        
__________________________________________________________________________________________________
conv3_block7_1_relu (Activation (None, 32, 32, 128)  0           conv3_block7_1_bn[0][0]          
__________________________________________________________________________________________________
conv3_block7_2_conv (Conv2D)    (None, 32, 32, 32)   36864       conv3_block7_1_relu[0][0]        
__________________________________________________________________________________________________
conv3_block7_concat (Concatenat (None, 32, 32, 352)  0           conv3_block6_concat[0][0]        
                                                                 conv3_block7_2_conv[0][0]        
__________________________________________________________________________________________________
conv3_block8_0_bn (BatchNormali (None, 32, 32, 352)  1408        conv3_block7_concat[0][0]        
__________________________________________________________________________________________________
conv3_block8_0_relu (Activation (None, 32, 32, 352)  0           conv3_block8_0_bn[0][0]          
__________________________________________________________________________________________________
conv3_block8_1_conv (Conv2D)    (None, 32, 32, 128)  45056       conv3_block8_0_relu[0][0]        
__________________________________________________________________________________________________
conv3_block8_1_bn (BatchNormali (None, 32, 32, 128)  512         conv3_block8_1_conv[0][0]        
__________________________________________________________________________________________________
conv3_block8_1_relu (Activation (None, 32, 32, 128)  0           conv3_block8_1_bn[0][0]          
__________________________________________________________________________________________________
conv3_block8_2_conv (Conv2D)    (None, 32, 32, 32)   36864       conv3_block8_1_relu[0][0]        
__________________________________________________________________________________________________
conv3_block8_concat (Concatenat (None, 32, 32, 384)  0           conv3_block7_concat[0][0]        
                                                                 conv3_block8_2_conv[0][0]        
__________________________________________________________________________________________________
conv3_block9_0_bn (BatchNormali (None, 32, 32, 384)  1536        conv3_block8_concat[0][0]        
__________________________________________________________________________________________________
conv3_block9_0_relu (Activation (None, 32, 32, 384)  0           conv3_block9_0_bn[0][0]          
__________________________________________________________________________________________________
conv3_block9_1_conv (Conv2D)    (None, 32, 32, 128)  49152       conv3_block9_0_relu[0][0]        
__________________________________________________________________________________________________
conv3_block9_1_bn (BatchNormali (None, 32, 32, 128)  512         conv3_block9_1_conv[0][0]        
__________________________________________________________________________________________________
conv3_block9_1_relu (Activation (None, 32, 32, 128)  0           conv3_block9_1_bn[0][0]          
__________________________________________________________________________________________________
conv3_block9_2_conv (Conv2D)    (None, 32, 32, 32)   36864       conv3_block9_1_relu[0][0]        
__________________________________________________________________________________________________
conv3_block9_concat (Concatenat (None, 32, 32, 416)  0           conv3_block8_concat[0][0]        
                                                                 conv3_block9_2_conv[0][0]        
__________________________________________________________________________________________________
conv3_block10_0_bn (BatchNormal (None, 32, 32, 416)  1664        conv3_block9_concat[0][0]        
__________________________________________________________________________________________________
conv3_block10_0_relu (Activatio (None, 32, 32, 416)  0           conv3_block10_0_bn[0][0]         
__________________________________________________________________________________________________
conv3_block10_1_conv (Conv2D)   (None, 32, 32, 128)  53248       conv3_block10_0_relu[0][0]       
__________________________________________________________________________________________________
conv3_block10_1_bn (BatchNormal (None, 32, 32, 128)  512         conv3_block10_1_conv[0][0]       
__________________________________________________________________________________________________
conv3_block10_1_relu (Activatio (None, 32, 32, 128)  0           conv3_block10_1_bn[0][0]         
__________________________________________________________________________________________________
conv3_block10_2_conv (Conv2D)   (None, 32, 32, 32)   36864       conv3_block10_1_relu[0][0]       
__________________________________________________________________________________________________
conv3_block10_concat (Concatena (None, 32, 32, 448)  0           conv3_block9_concat[0][0]        
                                                                 conv3_block10_2_conv[0][0]       
__________________________________________________________________________________________________
conv3_block11_0_bn (BatchNormal (None, 32, 32, 448)  1792        conv3_block10_concat[0][0]       
__________________________________________________________________________________________________
conv3_block11_0_relu (Activatio (None, 32, 32, 448)  0           conv3_block11_0_bn[0][0]         
__________________________________________________________________________________________________
conv3_block11_1_conv (Conv2D)   (None, 32, 32, 128)  57344       conv3_block11_0_relu[0][0]       
__________________________________________________________________________________________________
conv3_block11_1_bn (BatchNormal (None, 32, 32, 128)  512         conv3_block11_1_conv[0][0]       
__________________________________________________________________________________________________
conv3_block11_1_relu (Activatio (None, 32, 32, 128)  0           conv3_block11_1_bn[0][0]         
__________________________________________________________________________________________________
conv3_block11_2_conv (Conv2D)   (None, 32, 32, 32)   36864       conv3_block11_1_relu[0][0]       
__________________________________________________________________________________________________
conv3_block11_concat (Concatena (None, 32, 32, 480)  0           conv3_block10_concat[0][0]       
                                                                 conv3_block11_2_conv[0][0]       
__________________________________________________________________________________________________
conv3_block12_0_bn (BatchNormal (None, 32, 32, 480)  1920        conv3_block11_concat[0][0]       
__________________________________________________________________________________________________
conv3_block12_0_relu (Activatio (None, 32, 32, 480)  0           conv3_block12_0_bn[0][0]         
__________________________________________________________________________________________________
conv3_block12_1_conv (Conv2D)   (None, 32, 32, 128)  61440       conv3_block12_0_relu[0][0]       
__________________________________________________________________________________________________
conv3_block12_1_bn (BatchNormal (None, 32, 32, 128)  512         conv3_block12_1_conv[0][0]       
__________________________________________________________________________________________________
conv3_block12_1_relu (Activatio (None, 32, 32, 128)  0           conv3_block12_1_bn[0][0]         
__________________________________________________________________________________________________
conv3_block12_2_conv (Conv2D)   (None, 32, 32, 32)   36864       conv3_block12_1_relu[0][0]       
__________________________________________________________________________________________________
conv3_block12_concat (Concatena (None, 32, 32, 512)  0           conv3_block11_concat[0][0]       
                                                                 conv3_block12_2_conv[0][0]       
__________________________________________________________________________________________________
pool3_bn (BatchNormalization)   (None, 32, 32, 512)  2048        conv3_block12_concat[0][0]       
__________________________________________________________________________________________________
pool3_relu (Activation)         (None, 32, 32, 512)  0           pool3_bn[0][0]                   
__________________________________________________________________________________________________
pool3_conv (Conv2D)             (None, 32, 32, 256)  131072      pool3_relu[0][0]                 
__________________________________________________________________________________________________
pool3_pool (AveragePooling2D)   (None, 16, 16, 256)  0           pool3_conv[0][0]                 
__________________________________________________________________________________________________
conv4_block1_0_bn (BatchNormali (None, 16, 16, 256)  1024        pool3_pool[0][0]                 
__________________________________________________________________________________________________
conv4_block1_0_relu (Activation (None, 16, 16, 256)  0           conv4_block1_0_bn[0][0]          
__________________________________________________________________________________________________
conv4_block1_1_conv (Conv2D)    (None, 16, 16, 128)  32768       conv4_block1_0_relu[0][0]        
__________________________________________________________________________________________________
conv4_block1_1_bn (BatchNormali (None, 16, 16, 128)  512         conv4_block1_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block1_1_relu (Activation (None, 16, 16, 128)  0           conv4_block1_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block1_2_conv (Conv2D)    (None, 16, 16, 32)   36864       conv4_block1_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block1_concat (Concatenat (None, 16, 16, 288)  0           pool3_pool[0][0]                 
                                                                 conv4_block1_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block2_0_bn (BatchNormali (None, 16, 16, 288)  1152        conv4_block1_concat[0][0]        
__________________________________________________________________________________________________
conv4_block2_0_relu (Activation (None, 16, 16, 288)  0           conv4_block2_0_bn[0][0]          
__________________________________________________________________________________________________
conv4_block2_1_conv (Conv2D)    (None, 16, 16, 128)  36864       conv4_block2_0_relu[0][0]        
__________________________________________________________________________________________________
conv4_block2_1_bn (BatchNormali (None, 16, 16, 128)  512         conv4_block2_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block2_1_relu (Activation (None, 16, 16, 128)  0           conv4_block2_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block2_2_conv (Conv2D)    (None, 16, 16, 32)   36864       conv4_block2_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block2_concat (Concatenat (None, 16, 16, 320)  0           conv4_block1_concat[0][0]        
                                                                 conv4_block2_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block3_0_bn (BatchNormali (None, 16, 16, 320)  1280        conv4_block2_concat[0][0]        
__________________________________________________________________________________________________
conv4_block3_0_relu (Activation (None, 16, 16, 320)  0           conv4_block3_0_bn[0][0]          
__________________________________________________________________________________________________
conv4_block3_1_conv (Conv2D)    (None, 16, 16, 128)  40960       conv4_block3_0_relu[0][0]        
__________________________________________________________________________________________________
conv4_block3_1_bn (BatchNormali (None, 16, 16, 128)  512         conv4_block3_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block3_1_relu (Activation (None, 16, 16, 128)  0           conv4_block3_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block3_2_conv (Conv2D)    (None, 16, 16, 32)   36864       conv4_block3_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block3_concat (Concatenat (None, 16, 16, 352)  0           conv4_block2_concat[0][0]        
                                                                 conv4_block3_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block4_0_bn (BatchNormali (None, 16, 16, 352)  1408        conv4_block3_concat[0][0]        
__________________________________________________________________________________________________
conv4_block4_0_relu (Activation (None, 16, 16, 352)  0           conv4_block4_0_bn[0][0]          
__________________________________________________________________________________________________
conv4_block4_1_conv (Conv2D)    (None, 16, 16, 128)  45056       conv4_block4_0_relu[0][0]        
__________________________________________________________________________________________________
conv4_block4_1_bn (BatchNormali (None, 16, 16, 128)  512         conv4_block4_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block4_1_relu (Activation (None, 16, 16, 128)  0           conv4_block4_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block4_2_conv (Conv2D)    (None, 16, 16, 32)   36864       conv4_block4_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block4_concat (Concatenat (None, 16, 16, 384)  0           conv4_block3_concat[0][0]        
                                                                 conv4_block4_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block5_0_bn (BatchNormali (None, 16, 16, 384)  1536        conv4_block4_concat[0][0]        
__________________________________________________________________________________________________
conv4_block5_0_relu (Activation (None, 16, 16, 384)  0           conv4_block5_0_bn[0][0]          
__________________________________________________________________________________________________
conv4_block5_1_conv (Conv2D)    (None, 16, 16, 128)  49152       conv4_block5_0_relu[0][0]        
__________________________________________________________________________________________________
conv4_block5_1_bn (BatchNormali (None, 16, 16, 128)  512         conv4_block5_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block5_1_relu (Activation (None, 16, 16, 128)  0           conv4_block5_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block5_2_conv (Conv2D)    (None, 16, 16, 32)   36864       conv4_block5_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block5_concat (Concatenat (None, 16, 16, 416)  0           conv4_block4_concat[0][0]        
                                                                 conv4_block5_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block6_0_bn (BatchNormali (None, 16, 16, 416)  1664        conv4_block5_concat[0][0]        
__________________________________________________________________________________________________
conv4_block6_0_relu (Activation (None, 16, 16, 416)  0           conv4_block6_0_bn[0][0]          
__________________________________________________________________________________________________
conv4_block6_1_conv (Conv2D)    (None, 16, 16, 128)  53248       conv4_block6_0_relu[0][0]        
__________________________________________________________________________________________________
conv4_block6_1_bn (BatchNormali (None, 16, 16, 128)  512         conv4_block6_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block6_1_relu (Activation (None, 16, 16, 128)  0           conv4_block6_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block6_2_conv (Conv2D)    (None, 16, 16, 32)   36864       conv4_block6_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block6_concat (Concatenat (None, 16, 16, 448)  0           conv4_block5_concat[0][0]        
                                                                 conv4_block6_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block7_0_bn (BatchNormali (None, 16, 16, 448)  1792        conv4_block6_concat[0][0]        
__________________________________________________________________________________________________
conv4_block7_0_relu (Activation (None, 16, 16, 448)  0           conv4_block7_0_bn[0][0]          
__________________________________________________________________________________________________
conv4_block7_1_conv (Conv2D)    (None, 16, 16, 128)  57344       conv4_block7_0_relu[0][0]        
__________________________________________________________________________________________________
conv4_block7_1_bn (BatchNormali (None, 16, 16, 128)  512         conv4_block7_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block7_1_relu (Activation (None, 16, 16, 128)  0           conv4_block7_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block7_2_conv (Conv2D)    (None, 16, 16, 32)   36864       conv4_block7_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block7_concat (Concatenat (None, 16, 16, 480)  0           conv4_block6_concat[0][0]        
                                                                 conv4_block7_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block8_0_bn (BatchNormali (None, 16, 16, 480)  1920        conv4_block7_concat[0][0]        
__________________________________________________________________________________________________
conv4_block8_0_relu (Activation (None, 16, 16, 480)  0           conv4_block8_0_bn[0][0]          
__________________________________________________________________________________________________
conv4_block8_1_conv (Conv2D)    (None, 16, 16, 128)  61440       conv4_block8_0_relu[0][0]        
__________________________________________________________________________________________________
conv4_block8_1_bn (BatchNormali (None, 16, 16, 128)  512         conv4_block8_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block8_1_relu (Activation (None, 16, 16, 128)  0           conv4_block8_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block8_2_conv (Conv2D)    (None, 16, 16, 32)   36864       conv4_block8_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block8_concat (Concatenat (None, 16, 16, 512)  0           conv4_block7_concat[0][0]        
                                                                 conv4_block8_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block9_0_bn (BatchNormali (None, 16, 16, 512)  2048        conv4_block8_concat[0][0]        
__________________________________________________________________________________________________
conv4_block9_0_relu (Activation (None, 16, 16, 512)  0           conv4_block9_0_bn[0][0]          
__________________________________________________________________________________________________
conv4_block9_1_conv (Conv2D)    (None, 16, 16, 128)  65536       conv4_block9_0_relu[0][0]        
__________________________________________________________________________________________________
conv4_block9_1_bn (BatchNormali (None, 16, 16, 128)  512         conv4_block9_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block9_1_relu (Activation (None, 16, 16, 128)  0           conv4_block9_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block9_2_conv (Conv2D)    (None, 16, 16, 32)   36864       conv4_block9_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block9_concat (Concatenat (None, 16, 16, 544)  0           conv4_block8_concat[0][0]        
                                                                 conv4_block9_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block10_0_bn (BatchNormal (None, 16, 16, 544)  2176        conv4_block9_concat[0][0]        
__________________________________________________________________________________________________
conv4_block10_0_relu (Activatio (None, 16, 16, 544)  0           conv4_block10_0_bn[0][0]         
__________________________________________________________________________________________________
conv4_block10_1_conv (Conv2D)   (None, 16, 16, 128)  69632       conv4_block10_0_relu[0][0]       
__________________________________________________________________________________________________
conv4_block10_1_bn (BatchNormal (None, 16, 16, 128)  512         conv4_block10_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block10_1_relu (Activatio (None, 16, 16, 128)  0           conv4_block10_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block10_2_conv (Conv2D)   (None, 16, 16, 32)   36864       conv4_block10_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block10_concat (Concatena (None, 16, 16, 576)  0           conv4_block9_concat[0][0]        
                                                                 conv4_block10_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block11_0_bn (BatchNormal (None, 16, 16, 576)  2304        conv4_block10_concat[0][0]       
__________________________________________________________________________________________________
conv4_block11_0_relu (Activatio (None, 16, 16, 576)  0           conv4_block11_0_bn[0][0]         
__________________________________________________________________________________________________
conv4_block11_1_conv (Conv2D)   (None, 16, 16, 128)  73728       conv4_block11_0_relu[0][0]       
__________________________________________________________________________________________________
conv4_block11_1_bn (BatchNormal (None, 16, 16, 128)  512         conv4_block11_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block11_1_relu (Activatio (None, 16, 16, 128)  0           conv4_block11_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block11_2_conv (Conv2D)   (None, 16, 16, 32)   36864       conv4_block11_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block11_concat (Concatena (None, 16, 16, 608)  0           conv4_block10_concat[0][0]       
                                                                 conv4_block11_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block12_0_bn (BatchNormal (None, 16, 16, 608)  2432        conv4_block11_concat[0][0]       
__________________________________________________________________________________________________
conv4_block12_0_relu (Activatio (None, 16, 16, 608)  0           conv4_block12_0_bn[0][0]         
__________________________________________________________________________________________________
conv4_block12_1_conv (Conv2D)   (None, 16, 16, 128)  77824       conv4_block12_0_relu[0][0]       
__________________________________________________________________________________________________
conv4_block12_1_bn (BatchNormal (None, 16, 16, 128)  512         conv4_block12_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block12_1_relu (Activatio (None, 16, 16, 128)  0           conv4_block12_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block12_2_conv (Conv2D)   (None, 16, 16, 32)   36864       conv4_block12_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block12_concat (Concatena (None, 16, 16, 640)  0           conv4_block11_concat[0][0]       
                                                                 conv4_block12_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block13_0_bn (BatchNormal (None, 16, 16, 640)  2560        conv4_block12_concat[0][0]       
__________________________________________________________________________________________________
conv4_block13_0_relu (Activatio (None, 16, 16, 640)  0           conv4_block13_0_bn[0][0]         
__________________________________________________________________________________________________
conv4_block13_1_conv (Conv2D)   (None, 16, 16, 128)  81920       conv4_block13_0_relu[0][0]       
__________________________________________________________________________________________________
conv4_block13_1_bn (BatchNormal (None, 16, 16, 128)  512         conv4_block13_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block13_1_relu (Activatio (None, 16, 16, 128)  0           conv4_block13_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block13_2_conv (Conv2D)   (None, 16, 16, 32)   36864       conv4_block13_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block13_concat (Concatena (None, 16, 16, 672)  0           conv4_block12_concat[0][0]       
                                                                 conv4_block13_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block14_0_bn (BatchNormal (None, 16, 16, 672)  2688        conv4_block13_concat[0][0]       
__________________________________________________________________________________________________
conv4_block14_0_relu (Activatio (None, 16, 16, 672)  0           conv4_block14_0_bn[0][0]         
__________________________________________________________________________________________________
conv4_block14_1_conv (Conv2D)   (None, 16, 16, 128)  86016       conv4_block14_0_relu[0][0]       
__________________________________________________________________________________________________
conv4_block14_1_bn (BatchNormal (None, 16, 16, 128)  512         conv4_block14_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block14_1_relu (Activatio (None, 16, 16, 128)  0           conv4_block14_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block14_2_conv (Conv2D)   (None, 16, 16, 32)   36864       conv4_block14_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block14_concat (Concatena (None, 16, 16, 704)  0           conv4_block13_concat[0][0]       
                                                                 conv4_block14_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block15_0_bn (BatchNormal (None, 16, 16, 704)  2816        conv4_block14_concat[0][0]       
__________________________________________________________________________________________________
conv4_block15_0_relu (Activatio (None, 16, 16, 704)  0           conv4_block15_0_bn[0][0]         
__________________________________________________________________________________________________
conv4_block15_1_conv (Conv2D)   (None, 16, 16, 128)  90112       conv4_block15_0_relu[0][0]       
__________________________________________________________________________________________________
conv4_block15_1_bn (BatchNormal (None, 16, 16, 128)  512         conv4_block15_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block15_1_relu (Activatio (None, 16, 16, 128)  0           conv4_block15_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block15_2_conv (Conv2D)   (None, 16, 16, 32)   36864       conv4_block15_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block15_concat (Concatena (None, 16, 16, 736)  0           conv4_block14_concat[0][0]       
                                                                 conv4_block15_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block16_0_bn (BatchNormal (None, 16, 16, 736)  2944        conv4_block15_concat[0][0]       
__________________________________________________________________________________________________
conv4_block16_0_relu (Activatio (None, 16, 16, 736)  0           conv4_block16_0_bn[0][0]         
__________________________________________________________________________________________________
conv4_block16_1_conv (Conv2D)   (None, 16, 16, 128)  94208       conv4_block16_0_relu[0][0]       
__________________________________________________________________________________________________
conv4_block16_1_bn (BatchNormal (None, 16, 16, 128)  512         conv4_block16_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block16_1_relu (Activatio (None, 16, 16, 128)  0           conv4_block16_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block16_2_conv (Conv2D)   (None, 16, 16, 32)   36864       conv4_block16_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block16_concat (Concatena (None, 16, 16, 768)  0           conv4_block15_concat[0][0]       
                                                                 conv4_block16_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block17_0_bn (BatchNormal (None, 16, 16, 768)  3072        conv4_block16_concat[0][0]       
__________________________________________________________________________________________________
conv4_block17_0_relu (Activatio (None, 16, 16, 768)  0           conv4_block17_0_bn[0][0]         
__________________________________________________________________________________________________
conv4_block17_1_conv (Conv2D)   (None, 16, 16, 128)  98304       conv4_block17_0_relu[0][0]       
__________________________________________________________________________________________________
conv4_block17_1_bn (BatchNormal (None, 16, 16, 128)  512         conv4_block17_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block17_1_relu (Activatio (None, 16, 16, 128)  0           conv4_block17_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block17_2_conv (Conv2D)   (None, 16, 16, 32)   36864       conv4_block17_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block17_concat (Concatena (None, 16, 16, 800)  0           conv4_block16_concat[0][0]       
                                                                 conv4_block17_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block18_0_bn (BatchNormal (None, 16, 16, 800)  3200        conv4_block17_concat[0][0]       
__________________________________________________________________________________________________
conv4_block18_0_relu (Activatio (None, 16, 16, 800)  0           conv4_block18_0_bn[0][0]         
__________________________________________________________________________________________________
conv4_block18_1_conv (Conv2D)   (None, 16, 16, 128)  102400      conv4_block18_0_relu[0][0]       
__________________________________________________________________________________________________
conv4_block18_1_bn (BatchNormal (None, 16, 16, 128)  512         conv4_block18_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block18_1_relu (Activatio (None, 16, 16, 128)  0           conv4_block18_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block18_2_conv (Conv2D)   (None, 16, 16, 32)   36864       conv4_block18_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block18_concat (Concatena (None, 16, 16, 832)  0           conv4_block17_concat[0][0]       
                                                                 conv4_block18_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block19_0_bn (BatchNormal (None, 16, 16, 832)  3328        conv4_block18_concat[0][0]       
__________________________________________________________________________________________________
conv4_block19_0_relu (Activatio (None, 16, 16, 832)  0           conv4_block19_0_bn[0][0]         
__________________________________________________________________________________________________
conv4_block19_1_conv (Conv2D)   (None, 16, 16, 128)  106496      conv4_block19_0_relu[0][0]       
__________________________________________________________________________________________________
conv4_block19_1_bn (BatchNormal (None, 16, 16, 128)  512         conv4_block19_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block19_1_relu (Activatio (None, 16, 16, 128)  0           conv4_block19_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block19_2_conv (Conv2D)   (None, 16, 16, 32)   36864       conv4_block19_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block19_concat (Concatena (None, 16, 16, 864)  0           conv4_block18_concat[0][0]       
                                                                 conv4_block19_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block20_0_bn (BatchNormal (None, 16, 16, 864)  3456        conv4_block19_concat[0][0]       
__________________________________________________________________________________________________
conv4_block20_0_relu (Activatio (None, 16, 16, 864)  0           conv4_block20_0_bn[0][0]         
__________________________________________________________________________________________________
conv4_block20_1_conv (Conv2D)   (None, 16, 16, 128)  110592      conv4_block20_0_relu[0][0]       
__________________________________________________________________________________________________
conv4_block20_1_bn (BatchNormal (None, 16, 16, 128)  512         conv4_block20_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block20_1_relu (Activatio (None, 16, 16, 128)  0           conv4_block20_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block20_2_conv (Conv2D)   (None, 16, 16, 32)   36864       conv4_block20_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block20_concat (Concatena (None, 16, 16, 896)  0           conv4_block19_concat[0][0]       
                                                                 conv4_block20_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block21_0_bn (BatchNormal (None, 16, 16, 896)  3584        conv4_block20_concat[0][0]       
__________________________________________________________________________________________________
conv4_block21_0_relu (Activatio (None, 16, 16, 896)  0           conv4_block21_0_bn[0][0]         
__________________________________________________________________________________________________
conv4_block21_1_conv (Conv2D)   (None, 16, 16, 128)  114688      conv4_block21_0_relu[0][0]       
__________________________________________________________________________________________________
conv4_block21_1_bn (BatchNormal (None, 16, 16, 128)  512         conv4_block21_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block21_1_relu (Activatio (None, 16, 16, 128)  0           conv4_block21_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block21_2_conv (Conv2D)   (None, 16, 16, 32)   36864       conv4_block21_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block21_concat (Concatena (None, 16, 16, 928)  0           conv4_block20_concat[0][0]       
                                                                 conv4_block21_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block22_0_bn (BatchNormal (None, 16, 16, 928)  3712        conv4_block21_concat[0][0]       
__________________________________________________________________________________________________
conv4_block22_0_relu (Activatio (None, 16, 16, 928)  0           conv4_block22_0_bn[0][0]         
__________________________________________________________________________________________________
conv4_block22_1_conv (Conv2D)   (None, 16, 16, 128)  118784      conv4_block22_0_relu[0][0]       
__________________________________________________________________________________________________
conv4_block22_1_bn (BatchNormal (None, 16, 16, 128)  512         conv4_block22_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block22_1_relu (Activatio (None, 16, 16, 128)  0           conv4_block22_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block22_2_conv (Conv2D)   (None, 16, 16, 32)   36864       conv4_block22_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block22_concat (Concatena (None, 16, 16, 960)  0           conv4_block21_concat[0][0]       
                                                                 conv4_block22_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block23_0_bn (BatchNormal (None, 16, 16, 960)  3840        conv4_block22_concat[0][0]       
__________________________________________________________________________________________________
conv4_block23_0_relu (Activatio (None, 16, 16, 960)  0           conv4_block23_0_bn[0][0]         
__________________________________________________________________________________________________
conv4_block23_1_conv (Conv2D)   (None, 16, 16, 128)  122880      conv4_block23_0_relu[0][0]       
__________________________________________________________________________________________________
conv4_block23_1_bn (BatchNormal (None, 16, 16, 128)  512         conv4_block23_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block23_1_relu (Activatio (None, 16, 16, 128)  0           conv4_block23_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block23_2_conv (Conv2D)   (None, 16, 16, 32)   36864       conv4_block23_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block23_concat (Concatena (None, 16, 16, 992)  0           conv4_block22_concat[0][0]       
                                                                 conv4_block23_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block24_0_bn (BatchNormal (None, 16, 16, 992)  3968        conv4_block23_concat[0][0]       
__________________________________________________________________________________________________
conv4_block24_0_relu (Activatio (None, 16, 16, 992)  0           conv4_block24_0_bn[0][0]         
__________________________________________________________________________________________________
conv4_block24_1_conv (Conv2D)   (None, 16, 16, 128)  126976      conv4_block24_0_relu[0][0]       
__________________________________________________________________________________________________
conv4_block24_1_bn (BatchNormal (None, 16, 16, 128)  512         conv4_block24_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block24_1_relu (Activatio (None, 16, 16, 128)  0           conv4_block24_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block24_2_conv (Conv2D)   (None, 16, 16, 32)   36864       conv4_block24_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block24_concat (Concatena (None, 16, 16, 1024) 0           conv4_block23_concat[0][0]       
                                                                 conv4_block24_2_conv[0][0]       
__________________________________________________________________________________________________
pool4_bn (BatchNormalization)   (None, 16, 16, 1024) 4096        conv4_block24_concat[0][0]       
__________________________________________________________________________________________________
pool4_relu (Activation)         (None, 16, 16, 1024) 0           pool4_bn[0][0]                   
__________________________________________________________________________________________________
pool4_conv (Conv2D)             (None, 16, 16, 512)  524288      pool4_relu[0][0]                 
__________________________________________________________________________________________________
pool4_pool (AveragePooling2D)   (None, 8, 8, 512)    0           pool4_conv[0][0]                 
__________________________________________________________________________________________________
conv5_block1_0_bn (BatchNormali (None, 8, 8, 512)    2048        pool4_pool[0][0]                 
__________________________________________________________________________________________________
conv5_block1_0_relu (Activation (None, 8, 8, 512)    0           conv5_block1_0_bn[0][0]          
__________________________________________________________________________________________________
conv5_block1_1_conv (Conv2D)    (None, 8, 8, 128)    65536       conv5_block1_0_relu[0][0]        
__________________________________________________________________________________________________
conv5_block1_1_bn (BatchNormali (None, 8, 8, 128)    512         conv5_block1_1_conv[0][0]        
__________________________________________________________________________________________________
conv5_block1_1_relu (Activation (None, 8, 8, 128)    0           conv5_block1_1_bn[0][0]          
__________________________________________________________________________________________________
conv5_block1_2_conv (Conv2D)    (None, 8, 8, 32)     36864       conv5_block1_1_relu[0][0]        
__________________________________________________________________________________________________
conv5_block1_concat (Concatenat (None, 8, 8, 544)    0           pool4_pool[0][0]                 
                                                                 conv5_block1_2_conv[0][0]        
__________________________________________________________________________________________________
conv5_block2_0_bn (BatchNormali (None, 8, 8, 544)    2176        conv5_block1_concat[0][0]        
__________________________________________________________________________________________________
conv5_block2_0_relu (Activation (None, 8, 8, 544)    0           conv5_block2_0_bn[0][0]          
__________________________________________________________________________________________________
conv5_block2_1_conv (Conv2D)    (None, 8, 8, 128)    69632       conv5_block2_0_relu[0][0]        
__________________________________________________________________________________________________
conv5_block2_1_bn (BatchNormali (None, 8, 8, 128)    512         conv5_block2_1_conv[0][0]        
__________________________________________________________________________________________________
conv5_block2_1_relu (Activation (None, 8, 8, 128)    0           conv5_block2_1_bn[0][0]          
__________________________________________________________________________________________________
conv5_block2_2_conv (Conv2D)    (None, 8, 8, 32)     36864       conv5_block2_1_relu[0][0]        
__________________________________________________________________________________________________
conv5_block2_concat (Concatenat (None, 8, 8, 576)    0           conv5_block1_concat[0][0]        
                                                                 conv5_block2_2_conv[0][0]        
__________________________________________________________________________________________________
conv5_block3_0_bn (BatchNormali (None, 8, 8, 576)    2304        conv5_block2_concat[0][0]        
__________________________________________________________________________________________________
conv5_block3_0_relu (Activation (None, 8, 8, 576)    0           conv5_block3_0_bn[0][0]          
__________________________________________________________________________________________________
conv5_block3_1_conv (Conv2D)    (None, 8, 8, 128)    73728       conv5_block3_0_relu[0][0]        
__________________________________________________________________________________________________
conv5_block3_1_bn (BatchNormali (None, 8, 8, 128)    512         conv5_block3_1_conv[0][0]        
__________________________________________________________________________________________________
conv5_block3_1_relu (Activation (None, 8, 8, 128)    0           conv5_block3_1_bn[0][0]          
__________________________________________________________________________________________________
conv5_block3_2_conv (Conv2D)    (None, 8, 8, 32)     36864       conv5_block3_1_relu[0][0]        
__________________________________________________________________________________________________
conv5_block3_concat (Concatenat (None, 8, 8, 608)    0           conv5_block2_concat[0][0]        
                                                                 conv5_block3_2_conv[0][0]        
__________________________________________________________________________________________________
conv5_block4_0_bn (BatchNormali (None, 8, 8, 608)    2432        conv5_block3_concat[0][0]        
__________________________________________________________________________________________________
conv5_block4_0_relu (Activation (None, 8, 8, 608)    0           conv5_block4_0_bn[0][0]          
__________________________________________________________________________________________________
conv5_block4_1_conv (Conv2D)    (None, 8, 8, 128)    77824       conv5_block4_0_relu[0][0]        
__________________________________________________________________________________________________
conv5_block4_1_bn (BatchNormali (None, 8, 8, 128)    512         conv5_block4_1_conv[0][0]        
__________________________________________________________________________________________________
conv5_block4_1_relu (Activation (None, 8, 8, 128)    0           conv5_block4_1_bn[0][0]          
__________________________________________________________________________________________________
conv5_block4_2_conv (Conv2D)    (None, 8, 8, 32)     36864       conv5_block4_1_relu[0][0]        
__________________________________________________________________________________________________
conv5_block4_concat (Concatenat (None, 8, 8, 640)    0           conv5_block3_concat[0][0]        
                                                                 conv5_block4_2_conv[0][0]        
__________________________________________________________________________________________________
conv5_block5_0_bn (BatchNormali (None, 8, 8, 640)    2560        conv5_block4_concat[0][0]        
__________________________________________________________________________________________________
conv5_block5_0_relu (Activation (None, 8, 8, 640)    0           conv5_block5_0_bn[0][0]          
__________________________________________________________________________________________________
conv5_block5_1_conv (Conv2D)    (None, 8, 8, 128)    81920       conv5_block5_0_relu[0][0]        
__________________________________________________________________________________________________
conv5_block5_1_bn (BatchNormali (None, 8, 8, 128)    512         conv5_block5_1_conv[0][0]        
__________________________________________________________________________________________________
conv5_block5_1_relu (Activation (None, 8, 8, 128)    0           conv5_block5_1_bn[0][0]          
__________________________________________________________________________________________________
conv5_block5_2_conv (Conv2D)    (None, 8, 8, 32)     36864       conv5_block5_1_relu[0][0]        
__________________________________________________________________________________________________
conv5_block5_concat (Concatenat (None, 8, 8, 672)    0           conv5_block4_concat[0][0]        
                                                                 conv5_block5_2_conv[0][0]        
__________________________________________________________________________________________________
conv5_block6_0_bn (BatchNormali (None, 8, 8, 672)    2688        conv5_block5_concat[0][0]        
__________________________________________________________________________________________________
conv5_block6_0_relu (Activation (None, 8, 8, 672)    0           conv5_block6_0_bn[0][0]          
__________________________________________________________________________________________________
conv5_block6_1_conv (Conv2D)    (None, 8, 8, 128)    86016       conv5_block6_0_relu[0][0]        
__________________________________________________________________________________________________
conv5_block6_1_bn (BatchNormali (None, 8, 8, 128)    512         conv5_block6_1_conv[0][0]        
__________________________________________________________________________________________________
conv5_block6_1_relu (Activation (None, 8, 8, 128)    0           conv5_block6_1_bn[0][0]          
__________________________________________________________________________________________________
conv5_block6_2_conv (Conv2D)    (None, 8, 8, 32)     36864       conv5_block6_1_relu[0][0]        
__________________________________________________________________________________________________
conv5_block6_concat (Concatenat (None, 8, 8, 704)    0           conv5_block5_concat[0][0]        
                                                                 conv5_block6_2_conv[0][0]        
__________________________________________________________________________________________________
conv5_block7_0_bn (BatchNormali (None, 8, 8, 704)    2816        conv5_block6_concat[0][0]        
__________________________________________________________________________________________________
conv5_block7_0_relu (Activation (None, 8, 8, 704)    0           conv5_block7_0_bn[0][0]          
__________________________________________________________________________________________________
conv5_block7_1_conv (Conv2D)    (None, 8, 8, 128)    90112       conv5_block7_0_relu[0][0]        
__________________________________________________________________________________________________
conv5_block7_1_bn (BatchNormali (None, 8, 8, 128)    512         conv5_block7_1_conv[0][0]        
__________________________________________________________________________________________________
conv5_block7_1_relu (Activation (None, 8, 8, 128)    0           conv5_block7_1_bn[0][0]          
__________________________________________________________________________________________________
conv5_block7_2_conv (Conv2D)    (None, 8, 8, 32)     36864       conv5_block7_1_relu[0][0]        
__________________________________________________________________________________________________
conv5_block7_concat (Concatenat (None, 8, 8, 736)    0           conv5_block6_concat[0][0]        
                                                                 conv5_block7_2_conv[0][0]        
__________________________________________________________________________________________________
conv5_block8_0_bn (BatchNormali (None, 8, 8, 736)    2944        conv5_block7_concat[0][0]        
__________________________________________________________________________________________________
conv5_block8_0_relu (Activation (None, 8, 8, 736)    0           conv5_block8_0_bn[0][0]          
__________________________________________________________________________________________________
conv5_block8_1_conv (Conv2D)    (None, 8, 8, 128)    94208       conv5_block8_0_relu[0][0]        
__________________________________________________________________________________________________
conv5_block8_1_bn (BatchNormali (None, 8, 8, 128)    512         conv5_block8_1_conv[0][0]        
__________________________________________________________________________________________________
conv5_block8_1_relu (Activation (None, 8, 8, 128)    0           conv5_block8_1_bn[0][0]          
__________________________________________________________________________________________________
conv5_block8_2_conv (Conv2D)    (None, 8, 8, 32)     36864       conv5_block8_1_relu[0][0]        
__________________________________________________________________________________________________
conv5_block8_concat (Concatenat (None, 8, 8, 768)    0           conv5_block7_concat[0][0]        
                                                                 conv5_block8_2_conv[0][0]        
__________________________________________________________________________________________________
conv5_block9_0_bn (BatchNormali (None, 8, 8, 768)    3072        conv5_block8_concat[0][0]        
__________________________________________________________________________________________________
conv5_block9_0_relu (Activation (None, 8, 8, 768)    0           conv5_block9_0_bn[0][0]          
__________________________________________________________________________________________________
conv5_block9_1_conv (Conv2D)    (None, 8, 8, 128)    98304       conv5_block9_0_relu[0][0]        
__________________________________________________________________________________________________
conv5_block9_1_bn (BatchNormali (None, 8, 8, 128)    512         conv5_block9_1_conv[0][0]        
__________________________________________________________________________________________________
conv5_block9_1_relu (Activation (None, 8, 8, 128)    0           conv5_block9_1_bn[0][0]          
__________________________________________________________________________________________________
conv5_block9_2_conv (Conv2D)    (None, 8, 8, 32)     36864       conv5_block9_1_relu[0][0]        
__________________________________________________________________________________________________
conv5_block9_concat (Concatenat (None, 8, 8, 800)    0           conv5_block8_concat[0][0]        
                                                                 conv5_block9_2_conv[0][0]        
__________________________________________________________________________________________________
conv5_block10_0_bn (BatchNormal (None, 8, 8, 800)    3200        conv5_block9_concat[0][0]        
__________________________________________________________________________________________________
conv5_block10_0_relu (Activatio (None, 8, 8, 800)    0           conv5_block10_0_bn[0][0]         
__________________________________________________________________________________________________
conv5_block10_1_conv (Conv2D)   (None, 8, 8, 128)    102400      conv5_block10_0_relu[0][0]       
__________________________________________________________________________________________________
conv5_block10_1_bn (BatchNormal (None, 8, 8, 128)    512         conv5_block10_1_conv[0][0]       
__________________________________________________________________________________________________
conv5_block10_1_relu (Activatio (None, 8, 8, 128)    0           conv5_block10_1_bn[0][0]         
__________________________________________________________________________________________________
conv5_block10_2_conv (Conv2D)   (None, 8, 8, 32)     36864       conv5_block10_1_relu[0][0]       
__________________________________________________________________________________________________
conv5_block10_concat (Concatena (None, 8, 8, 832)    0           conv5_block9_concat[0][0]        
                                                                 conv5_block10_2_conv[0][0]       
__________________________________________________________________________________________________
conv5_block11_0_bn (BatchNormal (None, 8, 8, 832)    3328        conv5_block10_concat[0][0]       
__________________________________________________________________________________________________
conv5_block11_0_relu (Activatio (None, 8, 8, 832)    0           conv5_block11_0_bn[0][0]         
__________________________________________________________________________________________________
conv5_block11_1_conv (Conv2D)   (None, 8, 8, 128)    106496      conv5_block11_0_relu[0][0]       
__________________________________________________________________________________________________
conv5_block11_1_bn (BatchNormal (None, 8, 8, 128)    512         conv5_block11_1_conv[0][0]       
__________________________________________________________________________________________________
conv5_block11_1_relu (Activatio (None, 8, 8, 128)    0           conv5_block11_1_bn[0][0]         
__________________________________________________________________________________________________
conv5_block11_2_conv (Conv2D)   (None, 8, 8, 32)     36864       conv5_block11_1_relu[0][0]       
__________________________________________________________________________________________________
conv5_block11_concat (Concatena (None, 8, 8, 864)    0           conv5_block10_concat[0][0]       
                                                                 conv5_block11_2_conv[0][0]       
__________________________________________________________________________________________________
conv5_block12_0_bn (BatchNormal (None, 8, 8, 864)    3456        conv5_block11_concat[0][0]       
__________________________________________________________________________________________________
conv5_block12_0_relu (Activatio (None, 8, 8, 864)    0           conv5_block12_0_bn[0][0]         
__________________________________________________________________________________________________
conv5_block12_1_conv (Conv2D)   (None, 8, 8, 128)    110592      conv5_block12_0_relu[0][0]       
__________________________________________________________________________________________________
conv5_block12_1_bn (BatchNormal (None, 8, 8, 128)    512         conv5_block12_1_conv[0][0]       
__________________________________________________________________________________________________
conv5_block12_1_relu (Activatio (None, 8, 8, 128)    0           conv5_block12_1_bn[0][0]         
__________________________________________________________________________________________________
conv5_block12_2_conv (Conv2D)   (None, 8, 8, 32)     36864       conv5_block12_1_relu[0][0]       
__________________________________________________________________________________________________
conv5_block12_concat (Concatena (None, 8, 8, 896)    0           conv5_block11_concat[0][0]       
                                                                 conv5_block12_2_conv[0][0]       
__________________________________________________________________________________________________
conv5_block13_0_bn (BatchNormal (None, 8, 8, 896)    3584        conv5_block12_concat[0][0]       
__________________________________________________________________________________________________
conv5_block13_0_relu (Activatio (None, 8, 8, 896)    0           conv5_block13_0_bn[0][0]         
__________________________________________________________________________________________________
conv5_block13_1_conv (Conv2D)   (None, 8, 8, 128)    114688      conv5_block13_0_relu[0][0]       
__________________________________________________________________________________________________
conv5_block13_1_bn (BatchNormal (None, 8, 8, 128)    512         conv5_block13_1_conv[0][0]       
__________________________________________________________________________________________________
conv5_block13_1_relu (Activatio (None, 8, 8, 128)    0           conv5_block13_1_bn[0][0]         
__________________________________________________________________________________________________
conv5_block13_2_conv (Conv2D)   (None, 8, 8, 32)     36864       conv5_block13_1_relu[0][0]       
__________________________________________________________________________________________________
conv5_block13_concat (Concatena (None, 8, 8, 928)    0           conv5_block12_concat[0][0]       
                                                                 conv5_block13_2_conv[0][0]       
__________________________________________________________________________________________________
conv5_block14_0_bn (BatchNormal (None, 8, 8, 928)    3712        conv5_block13_concat[0][0]       
__________________________________________________________________________________________________
conv5_block14_0_relu (Activatio (None, 8, 8, 928)    0           conv5_block14_0_bn[0][0]         
__________________________________________________________________________________________________
conv5_block14_1_conv (Conv2D)   (None, 8, 8, 128)    118784      conv5_block14_0_relu[0][0]       
__________________________________________________________________________________________________
conv5_block14_1_bn (BatchNormal (None, 8, 8, 128)    512         conv5_block14_1_conv[0][0]       
__________________________________________________________________________________________________
conv5_block14_1_relu (Activatio (None, 8, 8, 128)    0           conv5_block14_1_bn[0][0]         
__________________________________________________________________________________________________
conv5_block14_2_conv (Conv2D)   (None, 8, 8, 32)     36864       conv5_block14_1_relu[0][0]       
__________________________________________________________________________________________________
conv5_block14_concat (Concatena (None, 8, 8, 960)    0           conv5_block13_concat[0][0]       
                                                                 conv5_block14_2_conv[0][0]       
__________________________________________________________________________________________________
conv5_block15_0_bn (BatchNormal (None, 8, 8, 960)    3840        conv5_block14_concat[0][0]       
__________________________________________________________________________________________________
conv5_block15_0_relu (Activatio (None, 8, 8, 960)    0           conv5_block15_0_bn[0][0]         
__________________________________________________________________________________________________
conv5_block15_1_conv (Conv2D)   (None, 8, 8, 128)    122880      conv5_block15_0_relu[0][0]       
__________________________________________________________________________________________________
conv5_block15_1_bn (BatchNormal (None, 8, 8, 128)    512         conv5_block15_1_conv[0][0]       
__________________________________________________________________________________________________
conv5_block15_1_relu (Activatio (None, 8, 8, 128)    0           conv5_block15_1_bn[0][0]         
__________________________________________________________________________________________________
conv5_block15_2_conv (Conv2D)   (None, 8, 8, 32)     36864       conv5_block15_1_relu[0][0]       
__________________________________________________________________________________________________
conv5_block15_concat (Concatena (None, 8, 8, 992)    0           conv5_block14_concat[0][0]       
                                                                 conv5_block15_2_conv[0][0]       
__________________________________________________________________________________________________
conv5_block16_0_bn (BatchNormal (None, 8, 8, 992)    3968        conv5_block15_concat[0][0]       
__________________________________________________________________________________________________
conv5_block16_0_relu (Activatio (None, 8, 8, 992)    0           conv5_block16_0_bn[0][0]         
__________________________________________________________________________________________________
conv5_block16_1_conv (Conv2D)   (None, 8, 8, 128)    126976      conv5_block16_0_relu[0][0]       
__________________________________________________________________________________________________
conv5_block16_1_bn (BatchNormal (None, 8, 8, 128)    512         conv5_block16_1_conv[0][0]       
__________________________________________________________________________________________________
conv5_block16_1_relu (Activatio (None, 8, 8, 128)    0           conv5_block16_1_bn[0][0]         
__________________________________________________________________________________________________
conv5_block16_2_conv (Conv2D)   (None, 8, 8, 32)     36864       conv5_block16_1_relu[0][0]       
__________________________________________________________________________________________________
conv5_block16_concat (Concatena (None, 8, 8, 1024)   0           conv5_block15_concat[0][0]       
                                                                 conv5_block16_2_conv[0][0]       
__________________________________________________________________________________________________
bn (BatchNormalization)         (None, 8, 8, 1024)   4096        conv5_block16_concat[0][0]       
__________________________________________________________________________________________________
relu (Activation)               (None, 8, 8, 1024)   0           bn[0][0]                         
__________________________________________________________________________________________________
conv2d (Conv2D)                 (None, 6, 6, 32)     294944      relu[0][0]                       
__________________________________________________________________________________________________
activation (Activation)         (None, 6, 6, 32)     0           conv2d[0][0]                     
__________________________________________________________________________________________________
max_pooling2d (MaxPooling2D)    (None, 3, 3, 32)     0           activation[0][0]                 
__________________________________________________________________________________________________
flatten (Flatten)               (None, 288)          0           max_pooling2d[0][0]              
__________________________________________________________________________________________________
dense (Dense)                   (None, 256)          73984       flatten[0][0]                    
__________________________________________________________________________________________________
dropout (Dropout)               (None, 256)          0           dense[0][0]                      
__________________________________________________________________________________________________
dense_1 (Dense)                 (None, 128)          32896       dropout[0][0]                    
__________________________________________________________________________________________________
dropout_1 (Dropout)             (None, 128)          0           dense_1[0][0]                    
__________________________________________________________________________________________________
dense_2 (Dense)                 (None, 1)            129         dropout_1[0][0]                  
==================================================================================================
Total params: 7,439,457
Trainable params: 401,953
Non-trainable params: 7,037,504
__________________________________________________________________________________________________
In [ ]:
from tensorflow.keras.utils import plot_model
plot_model(model1, 'model.png', show_shapes=True)
dot: graph is too large for cairo-renderer bitmaps. Scaling by 0.67963 to fit

Out[ ]:

Tensorboard, Checkpoint. Creating callback list

In [ ]:
# tensor-board in colab
# Refer: https://www.tensorflow.org/tensorboard/get_started
import os
import datetime

! rm -rf ./logs/ 
logdir = os.path.join("logs", datetime.datetime.now().strftime("%Y%m%d-%H%M%S"))
print(logdir)
logs/20210113-104620
In [ ]:
import os
os.mkdir("model_save")
In [ ]:
from tensorflow.keras.callbacks import ModelCheckpoint
from tensorflow.keras.callbacks import CSVLogger

filepath="model_save/"
checkpoints = ModelCheckpoint(filepath+'densenet121_weights-{epoch:02d}-{val_recall:.4f}.hdf5', monitor='val_accuracy', save_weights_only=True, verbose=1, save_best_only=True, mode='max')

train_log = CSVLogger(filepath+'history__densenet121_weights_tf_dim_ordering_tf_kernels_notop.log') #storing the training results in a pandas dataframe

tensorboard_callback = tf.keras.callbacks.TensorBoard(logdir, histogram_freq=1)

callbacks_list = [checkpoints, train_log, tensorboard_callback]
In [ ]:
%load_ext tensorboard
%tensorboard --logdir $logdir

Train the model

In [ ]:
history1 = model1.fit(train_ds, epochs=35, batch_size = 32, validation_data=val_ds, callbacks=callbacks_list, verbose = 1)
Epoch 1/35
535/535 [==============================] - 630s 856ms/step - loss: 0.5314 - accuracy: 0.7527 - precision: 0.3904 - recall: 0.1370 - val_loss: 0.4011 - val_accuracy: 0.8058 - val_precision: 0.7360 - val_recall: 0.1941

Epoch 00001: val_accuracy improved from -inf to 0.80580, saving model to model_save/densenet121_weights-01-0.1941.hdf5
Epoch 2/35
535/535 [==============================] - 120s 224ms/step - loss: 0.4069 - accuracy: 0.8111 - precision: 0.6517 - recall: 0.3474 - val_loss: 0.3611 - val_accuracy: 0.8348 - val_precision: 0.7123 - val_recall: 0.4283

Epoch 00002: val_accuracy improved from 0.80580 to 0.83482, saving model to model_save/densenet121_weights-02-0.4283.hdf5
Epoch 3/35
535/535 [==============================] - 121s 226ms/step - loss: 0.3677 - accuracy: 0.8356 - precision: 0.6977 - recall: 0.4742 - val_loss: 0.3380 - val_accuracy: 0.8545 - val_precision: 0.7587 - val_recall: 0.5042

Epoch 00003: val_accuracy improved from 0.83482 to 0.85447, saving model to model_save/densenet121_weights-03-0.5042.hdf5
Epoch 4/35
535/535 [==============================] - 121s 227ms/step - loss: 0.3423 - accuracy: 0.8496 - precision: 0.7271 - recall: 0.5298 - val_loss: 0.3085 - val_accuracy: 0.8666 - val_precision: 0.7981 - val_recall: 0.5338

Epoch 00004: val_accuracy improved from 0.85447 to 0.86664, saving model to model_save/densenet121_weights-04-0.5338.hdf5
Epoch 5/35
535/535 [==============================] - 121s 226ms/step - loss: 0.3001 - accuracy: 0.8703 - precision: 0.7611 - recall: 0.6161 - val_loss: 0.2807 - val_accuracy: 0.8821 - val_precision: 0.8384 - val_recall: 0.5802

Epoch 00005: val_accuracy improved from 0.86664 to 0.88208, saving model to model_save/densenet121_weights-05-0.5802.hdf5
Epoch 6/35
535/535 [==============================] - 121s 226ms/step - loss: 0.2587 - accuracy: 0.8953 - precision: 0.8075 - recall: 0.7005 - val_loss: 0.2695 - val_accuracy: 0.8877 - val_precision: 0.8503 - val_recall: 0.5992

Epoch 00006: val_accuracy improved from 0.88208 to 0.88769, saving model to model_save/densenet121_weights-06-0.5992.hdf5
Epoch 7/35
535/535 [==============================] - 121s 226ms/step - loss: 0.2175 - accuracy: 0.9109 - precision: 0.8386 - recall: 0.7471 - val_loss: 0.2394 - val_accuracy: 0.9059 - val_precision: 0.8337 - val_recall: 0.7194

Epoch 00007: val_accuracy improved from 0.88769 to 0.90594, saving model to model_save/densenet121_weights-07-0.7194.hdf5
Epoch 8/35
535/535 [==============================] - 121s 227ms/step - loss: 0.1766 - accuracy: 0.9357 - precision: 0.8815 - recall: 0.8242 - val_loss: 0.2327 - val_accuracy: 0.9083 - val_precision: 0.8294 - val_recall: 0.7384

Epoch 00008: val_accuracy improved from 0.90594 to 0.90828, saving model to model_save/densenet121_weights-08-0.7384.hdf5
Epoch 9/35
535/535 [==============================] - 121s 226ms/step - loss: 0.1303 - accuracy: 0.9503 - precision: 0.9049 - recall: 0.8704 - val_loss: 0.2116 - val_accuracy: 0.9219 - val_precision: 0.8287 - val_recall: 0.8165

Epoch 00009: val_accuracy improved from 0.90828 to 0.92185, saving model to model_save/densenet121_weights-09-0.8165.hdf5
Epoch 10/35
535/535 [==============================] - 121s 227ms/step - loss: 0.0954 - accuracy: 0.9614 - precision: 0.9259 - recall: 0.9004 - val_loss: 0.2445 - val_accuracy: 0.9242 - val_precision: 0.7955 - val_recall: 0.8861

Epoch 00010: val_accuracy improved from 0.92185 to 0.92419, saving model to model_save/densenet121_weights-10-0.8861.hdf5
Epoch 11/35
535/535 [==============================] - 121s 226ms/step - loss: 0.0682 - accuracy: 0.9764 - precision: 0.9528 - recall: 0.9415 - val_loss: 0.2271 - val_accuracy: 0.9321 - val_precision: 0.8270 - val_recall: 0.8776

Epoch 00011: val_accuracy improved from 0.92419 to 0.93215, saving model to model_save/densenet121_weights-11-0.8776.hdf5
Epoch 12/35
535/535 [==============================] - 121s 226ms/step - loss: 0.0623 - accuracy: 0.9758 - precision: 0.9567 - recall: 0.9346 - val_loss: 0.2467 - val_accuracy: 0.9261 - val_precision: 0.7904 - val_recall: 0.9072

Epoch 00012: val_accuracy did not improve from 0.93215
Epoch 13/35
535/535 [==============================] - 121s 225ms/step - loss: 0.0644 - accuracy: 0.9753 - precision: 0.9514 - recall: 0.9379 - val_loss: 0.2609 - val_accuracy: 0.9195 - val_precision: 0.7849 - val_recall: 0.8776

Epoch 00013: val_accuracy did not improve from 0.93215
Epoch 14/35
535/535 [==============================] - 119s 223ms/step - loss: 0.0560 - accuracy: 0.9829 - precision: 0.9643 - recall: 0.9590 - val_loss: 0.3186 - val_accuracy: 0.9120 - val_precision: 0.7474 - val_recall: 0.9114

Epoch 00014: val_accuracy did not improve from 0.93215
Epoch 15/35
535/535 [==============================] - 119s 223ms/step - loss: 0.0413 - accuracy: 0.9858 - precision: 0.9729 - recall: 0.9633 - val_loss: 0.3447 - val_accuracy: 0.9059 - val_precision: 0.7333 - val_recall: 0.9051

Epoch 00015: val_accuracy did not improve from 0.93215
Epoch 16/35
535/535 [==============================] - 121s 226ms/step - loss: 0.0437 - accuracy: 0.9837 - precision: 0.9628 - recall: 0.9645 - val_loss: 0.2482 - val_accuracy: 0.9396 - val_precision: 0.8808 - val_recall: 0.8418

Epoch 00016: val_accuracy improved from 0.93215 to 0.93963, saving model to model_save/densenet121_weights-16-0.8418.hdf5
Epoch 17/35
535/535 [==============================] - 121s 226ms/step - loss: 0.0409 - accuracy: 0.9842 - precision: 0.9720 - recall: 0.9574 - val_loss: 0.2790 - val_accuracy: 0.9345 - val_precision: 0.8212 - val_recall: 0.9008

Epoch 00017: val_accuracy did not improve from 0.93963
Epoch 18/35
535/535 [==============================] - 121s 226ms/step - loss: 0.0282 - accuracy: 0.9898 - precision: 0.9785 - recall: 0.9760 - val_loss: 0.2869 - val_accuracy: 0.9307 - val_precision: 0.7974 - val_recall: 0.9219

Epoch 00018: val_accuracy did not improve from 0.93963
Epoch 19/35
535/535 [==============================] - 121s 226ms/step - loss: 0.0296 - accuracy: 0.9899 - precision: 0.9773 - recall: 0.9776 - val_loss: 0.3382 - val_accuracy: 0.9219 - val_precision: 0.7688 - val_recall: 0.9262

Epoch 00019: val_accuracy did not improve from 0.93963
Epoch 20/35
535/535 [==============================] - 121s 226ms/step - loss: 0.0311 - accuracy: 0.9887 - precision: 0.9774 - recall: 0.9722 - val_loss: 0.3347 - val_accuracy: 0.9289 - val_precision: 0.7960 - val_recall: 0.9135

Epoch 00020: val_accuracy did not improve from 0.93963
Epoch 21/35
535/535 [==============================] - 121s 226ms/step - loss: 0.0275 - accuracy: 0.9884 - precision: 0.9756 - recall: 0.9727 - val_loss: 0.4815 - val_accuracy: 0.8956 - val_precision: 0.7014 - val_recall: 0.9219

Epoch 00021: val_accuracy did not improve from 0.93963
Epoch 22/35
535/535 [==============================] - 120s 225ms/step - loss: 0.0303 - accuracy: 0.9893 - precision: 0.9752 - recall: 0.9777 - val_loss: 0.2557 - val_accuracy: 0.9509 - val_precision: 0.8742 - val_recall: 0.9093

Epoch 00022: val_accuracy improved from 0.93963 to 0.95087, saving model to model_save/densenet121_weights-22-0.9093.hdf5
Epoch 23/35
535/535 [==============================] - 120s 225ms/step - loss: 0.0207 - accuracy: 0.9915 - precision: 0.9774 - recall: 0.9850 - val_loss: 0.3535 - val_accuracy: 0.9307 - val_precision: 0.8159 - val_recall: 0.8882

Epoch 00023: val_accuracy did not improve from 0.95087
Epoch 24/35
535/535 [==============================] - 120s 225ms/step - loss: 0.0187 - accuracy: 0.9936 - precision: 0.9846 - recall: 0.9870 - val_loss: 0.3197 - val_accuracy: 0.9443 - val_precision: 0.8586 - val_recall: 0.8966

Epoch 00024: val_accuracy did not improve from 0.95087
Epoch 25/35
535/535 [==============================] - 120s 225ms/step - loss: 0.0274 - accuracy: 0.9905 - precision: 0.9799 - recall: 0.9778 - val_loss: 0.3064 - val_accuracy: 0.9457 - val_precision: 0.9087 - val_recall: 0.8397

Epoch 00025: val_accuracy did not improve from 0.95087
Epoch 26/35
535/535 [==============================] - 120s 225ms/step - loss: 0.0256 - accuracy: 0.9918 - precision: 0.9797 - recall: 0.9838 - val_loss: 0.3473 - val_accuracy: 0.9242 - val_precision: 0.7746 - val_recall: 0.9283

Epoch 00026: val_accuracy did not improve from 0.95087
Epoch 27/35
535/535 [==============================] - 120s 224ms/step - loss: 0.0197 - accuracy: 0.9931 - precision: 0.9846 - recall: 0.9848 - val_loss: 0.3750 - val_accuracy: 0.9429 - val_precision: 0.8534 - val_recall: 0.8966

Epoch 00027: val_accuracy did not improve from 0.95087
Epoch 28/35
535/535 [==============================] - 120s 224ms/step - loss: 0.0175 - accuracy: 0.9933 - precision: 0.9870 - recall: 0.9830 - val_loss: 0.3395 - val_accuracy: 0.9392 - val_precision: 0.8197 - val_recall: 0.9304

Epoch 00028: val_accuracy did not improve from 0.95087
Epoch 29/35
535/535 [==============================] - 120s 224ms/step - loss: 0.0158 - accuracy: 0.9951 - precision: 0.9895 - recall: 0.9887 - val_loss: 0.3722 - val_accuracy: 0.9345 - val_precision: 0.8093 - val_recall: 0.9219

Epoch 00029: val_accuracy did not improve from 0.95087
Epoch 30/35
535/535 [==============================] - 120s 224ms/step - loss: 0.0200 - accuracy: 0.9938 - precision: 0.9847 - recall: 0.9876 - val_loss: 0.3751 - val_accuracy: 0.9340 - val_precision: 0.8184 - val_recall: 0.9030

Epoch 00030: val_accuracy did not improve from 0.95087
Epoch 31/35
535/535 [==============================] - 120s 224ms/step - loss: 0.0276 - accuracy: 0.9912 - precision: 0.9805 - recall: 0.9805 - val_loss: 0.3570 - val_accuracy: 0.9392 - val_precision: 0.8510 - val_recall: 0.8797

Epoch 00031: val_accuracy did not improve from 0.95087
Epoch 32/35
535/535 [==============================] - 119s 222ms/step - loss: 0.0230 - accuracy: 0.9920 - precision: 0.9825 - recall: 0.9821 - val_loss: 0.3165 - val_accuracy: 0.9518 - val_precision: 0.8989 - val_recall: 0.8819

Epoch 00032: val_accuracy improved from 0.95087 to 0.95180, saving model to model_save/densenet121_weights-32-0.8819.hdf5
Epoch 33/35
535/535 [==============================] - 120s 224ms/step - loss: 0.0154 - accuracy: 0.9956 - precision: 0.9905 - recall: 0.9903 - val_loss: 0.3062 - val_accuracy: 0.9569 - val_precision: 0.8996 - val_recall: 0.9072

Epoch 00033: val_accuracy improved from 0.95180 to 0.95695, saving model to model_save/densenet121_weights-33-0.9072.hdf5
Epoch 34/35
535/535 [==============================] - 120s 224ms/step - loss: 0.0167 - accuracy: 0.9948 - precision: 0.9881 - recall: 0.9887 - val_loss: 0.3507 - val_accuracy: 0.9420 - val_precision: 0.8302 - val_recall: 0.9283

Epoch 00034: val_accuracy did not improve from 0.95695
Epoch 35/35
535/535 [==============================] - 120s 224ms/step - loss: 0.0198 - accuracy: 0.9938 - precision: 0.9857 - recall: 0.9868 - val_loss: 0.2978 - val_accuracy: 0.9490 - val_precision: 0.8558 - val_recall: 0.9262

Epoch 00035: val_accuracy did not improve from 0.95695
In [ ]:
from IPython.display import Image
Image(url='https://imgur.com/akazboj.png')
Out[ ]:

Evaluate the model

In [ ]:
_ = model1.evaluate(train_ds)
_ = model1.evaluate(val_ds)
535/535 [==============================] - 93s 175ms/step - loss: 0.0305 - accuracy: 0.9884 - precision: 0.9528 - recall: 0.9968
134/134 [==============================] - 23s 174ms/step - loss: 0.2978 - accuracy: 0.9490 - precision: 0.8558 - recall: 0.9262
In [ ]:
history_dict = history1.history
history_dict.keys()
Out[ ]:
dict_keys(['loss', 'accuracy', 'precision', 'recall', 'val_loss', 'val_accuracy', 'val_precision', 'val_recall'])
In [ ]:
fig, axs = plt.subplots(2, 2, figsize=(10, 10))
axs = axs.ravel()

for i, met in enumerate(['loss', 'accuracy', 'precision', 'recall']):
    axs[i].plot(history1.history[met])
    axs[i].plot(history1.history['val_' + met])
    axs[i].set_title(f'Model {met}')
    axs[i].set_xlabel('epochs')
    axs[i].set_ylabel(met)
    axs[i].legend(['training', 'validation'])
plt.tight_layout()

Checking confusion Matrix on train Data with different thresholds

In [ ]:
from sklearn.metrics import confusion_matrix
import seaborn as sns
def confusion_mat(test_y,predict_y):
  '''' Function to Visualize the Confusion Matrix'''
  labels = [0,1]
  plt.figure(figsize=(6,6))
  cmap=sns.light_palette("blue")
  C = confusion_matrix(test_y, predict_y)
  print("Percentage of misclassified points ",(len(test_y)-np.trace(C))/len(test_y)*100)
  sns.heatmap(C, cmap="Blues",annot=True,annot_kws={"size": 16},fmt='g')
  plt.xlabel('Predicted Class')
  plt.ylabel('Original Class')
  plt.title('Confusion matrix')
  plt.show()
In [ ]:
y_pred_1=[] #array to store predicted label
y_true=[] #array to store the ground truth
for i,j in tqdm(train_ds.take(8540)):
  y_pred_1.extend(model1.predict(i)) #predicting each batch
  y_true.extend(j)
y_pred=[]
for i in y_pred_1: #the values are in probabilities and hence we are going to classify based on a custom threshold (0.5 is the default threshold)
  if i[0]>=0.5: #setting threshold
    y_pred.append(1)
  else:
    y_pred.append(0)
y_true=np.array(y_true) #converting the array for into numpy
y_pred=np.array(y_pred).reshape(1,-1)
y_pred=y_pred[0]
confusion_mat(y_true,y_pred)
100%|██████████| 535/535 [01:56<00:00,  4.60it/s]
Percentage of misclassified points  1.158165652784277
In [ ]:
from sklearn.metrics import accuracy_score, precision_score, recall_score

# accuracy_score(y_true, y_pred, normalize=True)
print("Accuracy Score : ",accuracy_score(np.array(y_true),y_pred))
print("Precision Score : ",precision_score(np.array(y_true),y_pred))
print("Recall Score : ",recall_score(np.array(y_true),y_pred))
Accuracy Score :  0.9884183434721572
Precision Score :  0.9527679024885729
Recall Score :  0.9968119022316685
In [ ]:
y_pred = []
for i in y_pred_1: #the values are in probabilities and hence we are going to classify based on a custom threshold (0.5 is the default threshold)
  if i[0]>=0.6: #setting threshold
    y_pred.append(1)
  else:
    y_pred.append(0)
y_true=np.array(y_true) #converting the array for into numpy
y_pred=np.array(y_pred).reshape(1,-1)
y_pred=y_pred[0]
confusion_mat(y_true,y_pred)
Percentage of misclassified points  0.959288722508189
In [ ]:
from sklearn.metrics import accuracy_score, precision_score, recall_score

# accuracy_score(y_true, y_pred, normalize=True)
print("Accuracy Score : ",accuracy_score(np.array(y_true),y_pred))
print("Precision Score : ",precision_score(np.array(y_true),y_pred))
print("Recall Score : ",recall_score(np.array(y_true),y_pred))
Accuracy Score :  0.9904071127749181
Precision Score :  0.9634397528321318
Recall Score :  0.9941551540913921
In [ ]:
y_pred = []

for i in y_pred_1: #the values are in probabilities and hence we are going to classify based on a custom threshold (0.5 is the default threshold)
  if i[0]>=0.7: #setting threshold
    y_pred.append(1)
  else:
    y_pred.append(0)
y_true=np.array(y_true) #converting the array for into numpy
y_pred=np.array(y_pred).reshape(1,-1)
y_pred=y_pred[0]
confusion_mat(y_true,y_pred)
Percentage of misclassified points  0.8656995788488535
In [ ]:
from sklearn.metrics import accuracy_score, precision_score, recall_score

# accuracy_score(y_true, y_pred, normalize=True)
print("Accuracy Score : ",accuracy_score(np.array(y_true),y_pred))
print("Precision Score : ",precision_score(np.array(y_true),y_pred))
print("Recall Score : ",recall_score(np.array(y_true),y_pred))
Accuracy Score :  0.9913430042115114
Precision Score :  0.9683937823834197
Recall Score :  0.9930924548352816
In [ ]:
y_pred = []

for i in y_pred_1: #the values are in probabilities and hence we are going to classify based on a custom threshold (0.5 is the default threshold)
  if i[0]>=0.8: #setting threshold
    y_pred.append(1)
  else:
    y_pred.append(0)
y_true=np.array(y_true) #converting the array for into numpy
y_pred=np.array(y_pred).reshape(1,-1)
y_pred=y_pred[0]
confusion_mat(y_true,y_pred)
Percentage of misclassified points  0.7370145063172672
In [ ]:
from sklearn.metrics import accuracy_score, precision_score, recall_score

# accuracy_score(y_true, y_pred, normalize=True)
print("Accuracy Score : ",accuracy_score(np.array(y_true),y_pred))
print("Precision Score : ",precision_score(np.array(y_true),y_pred))
print("Recall Score : ",recall_score(np.array(y_true),y_pred))
Accuracy Score :  0.9926298549368273
Precision Score :  0.9759288330716902
Recall Score :  0.9909670563230606
In [ ]:
y_pred = []

for i in y_pred_1: #the values are in probabilities and hence we are going to classify based on a custom threshold (0.5 is the default threshold)
  if i[0]>=0.9: #setting threshold
    y_pred.append(1)
  else:
    y_pred.append(0)
y_true=np.array(y_true) #converting the array for into numpy
y_pred=np.array(y_pred).reshape(1,-1)
y_pred=y_pred[0]
confusion_mat(y_true,y_pred)
Percentage of misclassified points  0.6317267197005147
In [ ]:
from sklearn.metrics import accuracy_score, precision_score, recall_score

# accuracy_score(y_true, y_pred, normalize=True)
print("Accuracy Score : ",accuracy_score(np.array(y_true),y_pred))
print("Precision Score : ",precision_score(np.array(y_true),y_pred))
print("Recall Score : ",recall_score(np.array(y_true),y_pred))
Accuracy Score :  0.9936827328029949
Precision Score :  0.9835978835978836
Recall Score :  0.987778958554729

Checking confusion Matrix on test Data with different thresholds

In [ ]:
y_pred_1=[] #array to store predicted label
y_true=[] #array to store the ground truth
for i,j in tqdm(val_ds.take(2135)):
  y_pred_1.extend(model1.predict(i)) #predicting each batch
  y_true.extend(j)
y_pred=[]
for i in y_pred_1: #the values are in probabilities and hence we are going to classify based on a custom threshold (0.5 is the default threshold)
  if i[0]>=0.5: #setting threshold
    y_pred.append(1)
  else:
    y_pred.append(0)
y_true=np.array(y_true) #converting the array for into numpy
y_pred=np.array(y_pred).reshape(1,-1)
y_pred=y_pred[0]
confusion_mat(y_true,y_pred)
100%|██████████| 134/134 [00:29<00:00,  4.62it/s]
Percentage of misclassified points  5.100608329433785
In [ ]:
from sklearn.metrics import accuracy_score, precision_score, recall_score

# accuracy_score(y_true, y_pred, normalize=True)
print("Accuracy Score : ",accuracy_score(np.array(y_true),y_pred))
print("Precision Score : ",precision_score(np.array(y_true),y_pred))
print("Recall Score : ",recall_score(np.array(y_true),y_pred))
Accuracy Score :  0.9489939167056621
Precision Score :  0.8557504873294347
Recall Score :  0.9261603375527426
In [ ]:
y_pred = []

for i in y_pred_1: #the values are in probabilities and hence we are going to classify based on a custom threshold (0.5 is the default threshold)
  if i[0]>=0.6: #setting threshold
    y_pred.append(1)
  else:
    y_pred.append(0)
y_true=np.array(y_true) #converting the array for into numpy
y_pred=np.array(y_pred).reshape(1,-1)
y_pred=y_pred[0]
confusion_mat(y_true,y_pred)
Percentage of misclassified points  4.632662611137108
In [ ]:
from sklearn.metrics import accuracy_score, precision_score, recall_score

# accuracy_score(y_true, y_pred, normalize=True)
print("Accuracy Score : ",accuracy_score(np.array(y_true),y_pred))
print("Precision Score : ",precision_score(np.array(y_true),y_pred))
print("Recall Score : ",recall_score(np.array(y_true),y_pred))
Accuracy Score :  0.9536733738886289
Precision Score :  0.874251497005988
Recall Score :  0.9240506329113924
In [ ]:
y_pred = []

for i in y_pred_1: #the values are in probabilities and hence we are going to classify based on a custom threshold (0.5 is the default threshold)
  if i[0]>=0.7: #setting threshold
    y_pred.append(1)
  else:
    y_pred.append(0)
y_true=np.array(y_true) #converting the array for into numpy
y_pred=np.array(y_pred).reshape(1,-1)
y_pred=y_pred[0]
confusion_mat(y_true,y_pred)
Percentage of misclassified points  4.305100608329434
In [ ]:
from sklearn.metrics import accuracy_score, precision_score, recall_score

# accuracy_score(y_true, y_pred, normalize=True)
print("Accuracy Score : ",accuracy_score(np.array(y_true),y_pred))
print("Precision Score : ",precision_score(np.array(y_true),y_pred))
print("Recall Score : ",recall_score(np.array(y_true),y_pred))
Accuracy Score :  0.9569489939167056
Precision Score :  0.8882113821138211
Recall Score :  0.9219409282700421
In [ ]:
y_pred = []

for i in y_pred_1: #the values are in probabilities and hence we are going to classify based on a custom threshold (0.5 is the default threshold)
  if i[0]>=0.8: #setting threshold
    y_pred.append(1)
  else:
    y_pred.append(0)
y_true=np.array(y_true) #converting the array for into numpy
y_pred=np.array(y_pred).reshape(1,-1)
y_pred=y_pred[0]
confusion_mat(y_true,y_pred)
Percentage of misclassified points  4.305100608329434
In [ ]:
from sklearn.metrics import accuracy_score, precision_score, recall_score

# accuracy_score(y_true, y_pred, normalize=True)
print("Accuracy Score : ",accuracy_score(np.array(y_true),y_pred))
print("Precision Score : ",precision_score(np.array(y_true),y_pred))
print("Recall Score : ",recall_score(np.array(y_true),y_pred))
Accuracy Score :  0.9569489939167056
Precision Score :  0.8962655601659751
Recall Score :  0.9113924050632911
In [ ]:
y_pred = []

for i in y_pred_1: #the values are in probabilities and hence we are going to classify based on a custom threshold (0.5 is the default threshold)
  if i[0]>=0.9: #setting threshold
    y_pred.append(1)
  else:
    y_pred.append(0)
y_true=np.array(y_true) #converting the array for into numpy
y_pred=np.array(y_pred).reshape(1,-1)
y_pred=y_pred[0]
confusion_mat(y_true,y_pred)
Percentage of misclassified points  4.492278895648105
In [ ]:
from sklearn.metrics import accuracy_score, precision_score, recall_score

# accuracy_score(y_true, y_pred, normalize=True)
print("Accuracy Score : ",accuracy_score(np.array(y_true),y_pred))
print("Precision Score : ",precision_score(np.array(y_true),y_pred))
print("Recall Score : ",recall_score(np.array(y_true),y_pred))
Accuracy Score :  0.955077211043519
Precision Score :  0.9038461538461539
Recall Score :  0.8924050632911392
In [ ]:
model1.save('/content/drive/My Drive/siim-acr-pneumothorax/pneumothorax/densenet121_weights-33-0.9072.hdf5')
In [ ]:
model1.save('densenet121_weights-33-0.9072.hdf5')
In [ ]:
model1 = load_model('densenet121_weights-33-0.9072.hdf5')
In [ ]:
y_pred_1=[] #array to store predicted label
y_true=[] #array to store the ground truth
for i,j in tqdm(val_ds.take(2135)):
  y_pred_1.extend(model1.predict(i)) #predicting each batch
  y_true.extend(j)
y_pred=[]
for i in y_pred_1: #the values are in probabilities and hence we are going to classify based on a custom threshold (0.5 is the default threshold)
  if i[0]>=0.5: #setting threshold
    y_pred.append(1)
  else:
    y_pred.append(0)
y_true=np.array(y_true) #converting the array for into numpy
y_pred=np.array(y_pred).reshape(1,-1)
y_pred=y_pred[0]
confusion_mat(y_true,y_pred)
100%|██████████| 134/134 [00:30<00:00,  4.33it/s]
Percentage of misclassified points  5.100608329433785
In [ ]:
from sklearn.metrics import accuracy_score, precision_score, recall_score

# accuracy_score(y_true, y_pred, normalize=True)
print("Accuracy Score : ",accuracy_score(np.array(y_true),y_pred))
print("Precision Score : ",precision_score(np.array(y_true),y_pred))
print("Recall Score : ",recall_score(np.array(y_true),y_pred))
Accuracy Score :  0.9489939167056621
Precision Score :  0.8557504873294347
Recall Score :  0.9261603375527426
In [ ]:
y_pred = []

for i in y_pred_1: #the values are in probabilities and hence we are going to classify based on a custom threshold (0.5 is the default threshold)
  if i[0]>=0.6: #setting threshold
    y_pred.append(1)
  else:
    y_pred.append(0)
y_true=np.array(y_true) #converting the array for into numpy
y_pred=np.array(y_pred).reshape(1,-1)
y_pred=y_pred[0]
confusion_mat(y_true,y_pred)
Percentage of misclassified points  4.632662611137108
In [ ]:
from sklearn.metrics import accuracy_score, precision_score, recall_score

# accuracy_score(y_true, y_pred, normalize=True)
print("Accuracy Score : ",accuracy_score(np.array(y_true),y_pred))
print("Precision Score : ",precision_score(np.array(y_true),y_pred))
print("Recall Score : ",recall_score(np.array(y_true),y_pred))
Accuracy Score :  0.9536733738886289
Precision Score :  0.874251497005988
Recall Score :  0.9240506329113924
In [ ]:
y_pred = []

for i in y_pred_1: #the values are in probabilities and hence we are going to classify based on a custom threshold (0.5 is the default threshold)
  if i[0]>=0.7: #setting threshold
    y_pred.append(1)
  else:
    y_pred.append(0)
y_true=np.array(y_true) #converting the array for into numpy
y_pred=np.array(y_pred).reshape(1,-1)
y_pred=y_pred[0]
confusion_mat(y_true,y_pred)
Percentage of misclassified points  4.305100608329434
In [ ]:
from sklearn.metrics import accuracy_score, precision_score, recall_score

# accuracy_score(y_true, y_pred, normalize=True)
print("Accuracy Score : ",accuracy_score(np.array(y_true),y_pred))
print("Precision Score : ",precision_score(np.array(y_true),y_pred))
print("Recall Score : ",recall_score(np.array(y_true),y_pred))
Accuracy Score :  0.9569489939167056
Precision Score :  0.8882113821138211
Recall Score :  0.9219409282700421
In [ ]:
y_pred = []

for i in y_pred_1: #the values are in probabilities and hence we are going to classify based on a custom threshold (0.5 is the default threshold)
  if i[0]>=0.8: #setting threshold
    y_pred.append(1)
  else:
    y_pred.append(0)
y_true=np.array(y_true) #converting the array for into numpy
y_pred=np.array(y_pred).reshape(1,-1)
y_pred=y_pred[0]
confusion_mat(y_true,y_pred)
Percentage of misclassified points  4.305100608329434
In [ ]:
from sklearn.metrics import accuracy_score, precision_score, recall_score

# accuracy_score(y_true, y_pred, normalize=True)
print("Accuracy Score : ",accuracy_score(np.array(y_true),y_pred))
print("Precision Score : ",precision_score(np.array(y_true),y_pred))
print("Recall Score : ",recall_score(np.array(y_true),y_pred))
Accuracy Score :  0.9569489939167056
Precision Score :  0.8962655601659751
Recall Score :  0.9113924050632911
In [ ]:
y_pred = []

for i in y_pred_1: #the values are in probabilities and hence we are going to classify based on a custom threshold (0.5 is the default threshold)
  if i[0]>=0.9: #setting threshold
    y_pred.append(1)
  else:
    y_pred.append(0)
y_true=np.array(y_true) #converting the array for into numpy
y_pred=np.array(y_pred).reshape(1,-1)
y_pred=y_pred[0]
confusion_mat(y_true,y_pred)
Percentage of misclassified points  4.492278895648105
In [ ]:
from sklearn.metrics import accuracy_score, precision_score, recall_score

# accuracy_score(y_true, y_pred, normalize=True)
print("Accuracy Score : ",accuracy_score(np.array(y_true),y_pred))
print("Precision Score : ",precision_score(np.array(y_true),y_pred))
print("Recall Score : ",recall_score(np.array(y_true),y_pred))
Accuracy Score :  0.955077211043519
Precision Score :  0.9038461538461539
Recall Score :  0.8924050632911392
In [ ]: